Replace NOT operator with explicit false check (#67817)
We have an in-house rule to compare explicitly against `false` instead of using the logical not operator (`!`). However, this hasn't historically been enforced, meaning that there are many violations in the source at present. We now have a Checkstyle rule that can detect these cases, but before we can turn it on, we need to fix the existing violations. This is being done over a series of PRs, since there are a lot to fix.
parent 9a1611da80
commit ad1f876daa
119 changed files with 264 additions and 246 deletions
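As a concrete illustration of the convention this PR series enforces, here is a minimal, hypothetical before/after sketch; the class and variable names are invented for illustration and do not come from the diff below:

import java.util.ArrayDeque;
import java.util.Deque;

class ExplicitFalseCheckExample {
    // Drains a queue, spelling the loop condition the way this PR series enforces.
    static void drain(Deque<String> queue) {
        // Before (what the Checkstyle rule flags): while (!queue.isEmpty()) { ... }
        // After (the house style):
        while (queue.isEmpty() == false) {
            System.out.println(queue.poll());
        }
    }

    public static void main(String[] args) {
        Deque<String> queue = new ArrayDeque<>();
        queue.add("a");
        queue.add("b");
        drain(queue);
    }
}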
@@ -124,7 +124,7 @@ class NoticeTask extends DefaultTask {
    if (line.contains('*/')) {
        inNotice = false

-       if (!isPackageInfo) {
+       if (isPackageInfo == false) {
            break
        }
    } else if (inNotice) {
@@ -105,7 +105,7 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
    private final Property<Boolean> failIfUnavailable;
    private final Configuration extracted;
    private Action<ElasticsearchDistribution> distributionFinalizer;
-   private boolean froozen = false;
+   private boolean frozen = false;

    ElasticsearchDistribution(
        String name,

@@ -207,10 +207,10 @@ public class ElasticsearchDistribution implements Buildable, Iterable<File> {
     * runs distribution finalizer logic.
     */
    public ElasticsearchDistribution maybeFreeze() {
-       if (!froozen) {
+       if (frozen == false) {
            finalizeValues();
            distributionFinalizer.execute(this);
-           froozen = true;
+           frozen = true;
        }
        return this;
    }
@@ -83,7 +83,7 @@ public class InternalDistributionDownloadPlugin implements InternalPlugin {
        resolutions.register("bwc", distributionResolution -> distributionResolution.setResolver((project, distribution) -> {
            BwcVersions.UnreleasedVersionInfo unreleasedInfo = bwcVersions.unreleasedInfo(Version.fromString(distribution.getVersion()));
            if (unreleasedInfo != null) {
-               if (!distribution.getBundledJdk()) {
+               if (distribution.getBundledJdk() == false) {
                    throw new GradleException(
                        "Configuring a snapshot bwc distribution ('"
                            + distribution.getName()
@@ -106,7 +106,7 @@ public abstract class FilePermissionsTask extends DefaultTask {
            .map(file -> "Source file is executable: " + file)
            .collect(Collectors.toList());

-       if (!failures.isEmpty()) {
+       if (failures.isEmpty() == false) {
            throw new GradleException("Found invalid file permissions:\n" + String.join("\n", failures));
        }
@@ -1235,7 +1235,7 @@ public class ElasticsearchNode implements TestClusterConfiguration {
        String content = new String(Files.readAllBytes(jvmOptions));
        Map<String, String> expansions = jvmOptionExpansions();
        for (String origin : expansions.keySet()) {
-           if (!content.contains(origin)) {
+           if (content.contains(origin) == false) {
                throw new IOException("template property " + origin + " not found in template.");
            }
            content = content.replace(origin, expansions.get(origin));
@@ -39,13 +39,13 @@ public final class FileUtils {
            return;
        }

-       if (dir.exists() && !dir.isDirectory()) {
+       if (dir.exists() && dir.isDirectory() == false) {
            throw new UncheckedIOException(String.format("Cannot create directory '%s' as it already exists, but is not a directory", dir));
        }

        List<File> toCreate = new LinkedList<File>();
        File parent = dir.getParentFile();
-       while (!parent.exists()) {
+       while (parent.exists() == false) {
            toCreate.add(parent);
            parent = parent.getParentFile();
        }

@@ -55,7 +55,7 @@ public final class FileUtils {
                continue;
            }
            File parentDirToCreateParent = parentDirToCreate.getParentFile();
-           if (!parentDirToCreateParent.isDirectory()) {
+           if (parentDirToCreateParent.isDirectory() == false) {
                throw new UncheckedIOException(
                    String.format(
                        "Cannot create parent directory '%s' when creating directory '%s' as '%s' is not a directory",

@@ -65,13 +65,13 @@ public final class FileUtils {
                    )
                );
            }
-           if (!parentDirToCreate.mkdir() && !parentDirToCreate.isDirectory()) {
+           if (parentDirToCreate.mkdir() == false && parentDirToCreate.isDirectory() == false) {
                throw new UncheckedIOException(
                    String.format("Failed to create parent directory '%s' when creating directory '%s'", parentDirToCreate, dir)
                );
            }
        }
-       if (!dir.mkdir() && !dir.isDirectory()) {
+       if (dir.mkdir() == false && dir.isDirectory() == false) {
            throw new UncheckedIOException(String.format("Failed to create directory '%s'", dir));
        }
    }
@@ -15,7 +15,7 @@
        <property name="tokens" value="EXPR"/>
        <property name="limitedTokens" value="LNOT"/>
        <property name="maximumNumber" value="0"/>
-       <property name="maximumDepth" value="1"/>
+       <property name="maximumDepth" value="2"/>
        <message
            key="descendant.token.max"
            value="Do not negate boolean expressions with '!', but check explicitly with '== false' as it is more explicit"/>
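A hedged note on the Checkstyle hunk above: the DescendantToken check counts LNOT (`!`) tokens beneath an expression and fails when more than zero are found, and raising maximumDepth from 1 to 2 presumably also catches negations wrapped one AST level deeper, such as a negated parenthesised instanceof test. The snippet below is an invented illustration of the shapes involved, not code from this commit:

class NegationShapes {
    static boolean accepts(Object obj, boolean done) {
        // Bare negation, caught at depth 1:            if (!done) { ... }
        // Negated parenthesised test, needs depth 2:   if (!(obj instanceof String)) { ... }

        // The accepted spellings under the rule's message:
        if (done == false) {
            return false;
        }
        return (obj instanceof String) == false;
    }
}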
@@ -37,7 +37,7 @@ public final class MetricsCalculator {
        Map<String, List<Sample>> samplesPerOperation = new HashMap<>();

        for (Sample sample : samples) {
-           if (!samplesPerOperation.containsKey(sample.getOperation())) {
+           if (samplesPerOperation.containsKey(sample.getOperation()) == false) {
                samplesPerOperation.put(sample.getOperation(), new ArrayList<>());
            }
            samplesPerOperation.get(sample.getOperation()).add(sample);
@@ -78,7 +78,7 @@ final class IngestRequestConverters {

    static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
        RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
-       if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
+       if (simulatePipelineRequest.getId() != null && simulatePipelineRequest.getId().isEmpty() == false) {
            builder.addPathPart(simulatePipelineRequest.getId());
        }
        builder.addPathPartAsIs("_simulate");
@@ -158,7 +158,7 @@ public final class GetAutoFollowPatternResponse {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        Pattern pattern = (Pattern) o;
        return Objects.equals(remoteCluster, pattern.remoteCluster) &&
            Objects.equals(leaderIndexPatterns, pattern.leaderIndexPatterns) &&
@@ -81,7 +81,7 @@ public final class PutAutoFollowPatternRequest extends FollowConfig implements V
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        PutAutoFollowPatternRequest that = (PutAutoFollowPatternRequest) o;
        return Objects.equals(name, that.name) &&
            Objects.equals(remoteCluster, that.remoteCluster) &&
@@ -79,7 +79,7 @@ public final class PutFollowRequest extends FollowConfig implements Validatable,
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        PutFollowRequest that = (PutFollowRequest) o;
        return Objects.equals(waitForActiveShards, that.waitForActiveShards) &&
            Objects.equals(remoteCluster, that.remoteCluster) &&
@@ -50,7 +50,7 @@ public final class ResumeFollowRequest extends FollowConfig implements Validatab
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        ResumeFollowRequest that = (ResumeFollowRequest) o;
        return Objects.equals(followerIndex, that.followerIndex);
    }
@@ -64,7 +64,7 @@ public class MultiTermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof MultiTermVectorsResponse)) return false;
+       if ((obj instanceof MultiTermVectorsResponse) == false) return false;
        MultiTermVectorsResponse other = (MultiTermVectorsResponse) obj;
        return Objects.equals(responses, other.responses);
    }
@@ -23,14 +23,15 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

import java.util.Collections;
-import java.util.List;
+import java.util.Comparator;
+import java.util.List;
import java.util.Objects;

+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
public class TermVectorsResponse {
    private final String index;
    private final String id;

@@ -127,7 +128,7 @@ public class TermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof TermVectorsResponse)) return false;
+       if ((obj instanceof TermVectorsResponse) == false) return false;
        TermVectorsResponse other = (TermVectorsResponse) obj;
        return index.equals(other.index)
            && Objects.equals(id, other.id)

@@ -203,7 +204,7 @@ public class TermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof TermVector)) return false;
+       if ((obj instanceof TermVector) == false) return false;
        TermVector other = (TermVector) obj;
        return fieldName.equals(other.fieldName)
            && Objects.equals(fieldStatistics, other.fieldStatistics)

@@ -267,7 +268,7 @@ public class TermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof FieldStatistics)) return false;
+       if ((obj instanceof FieldStatistics) == false) return false;
        FieldStatistics other = (FieldStatistics) obj;
        return docCount == other.docCount
            && sumDocFreq == other.sumDocFreq

@@ -374,7 +375,7 @@ public class TermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof Term)) return false;
+       if ((obj instanceof Term) == false) return false;
        Term other = (Term) obj;
        return term.equals(other.term)
            && termFreq == other.termFreq

@@ -456,7 +457,7 @@ public class TermVectorsResponse {
    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
-       if (!(obj instanceof Token)) return false;
+       if ((obj instanceof Token) == false) return false;
        Token other = (Token) obj;
        return Objects.equals(startOffset, other.startOffset)
            && Objects.equals(endOffset,other.endOffset)
@@ -34,7 +34,7 @@ import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
- * A Connection links exactly two {@link Vertex} objects. The basis of a 
+ * A Connection links exactly two {@link Vertex} objects. The basis of a
 * connection is one or more documents have been found that contain
 * this pair of terms and the strength of the connection is recorded
 * as a weight.

@@ -75,13 +75,13 @@ public class Connection {
    }

    /**
-    * @return the number of documents in the sampled set that contained this 
+    * @return the number of documents in the sampled set that contained this
     * pair of {@link Vertex} objects.
     */
    public long getDocCount() {
        return docCount;
    }


    @Override
    public boolean equals(Object obj) {
        if (this == obj)

@@ -107,7 +107,7 @@ public class Connection {
    private static final ParseField TARGET = new ParseField("target");
    private static final ParseField WEIGHT = new ParseField("weight");
    private static final ParseField DOC_COUNT = new ParseField("doc_count");
-   
+

    void toXContent(XContentBuilder builder, Params params, ObjectIntHashMap<Vertex> vertexNumbers) throws IOException {
        builder.field(SOURCE.getPreferredName(), vertexNumbers.get(from));

@@ -131,10 +131,10 @@ public class Connection {
            this.weight = weight;
            this.docCount = docCount;
        }
-       public Connection resolve(List<Vertex> vertices) {
+        public Connection resolve(List<Vertex> vertices) {
            return new Connection(vertices.get(fromIndex), vertices.get(toIndex), weight, docCount);
        }

        private static final ConstructingObjectParser<UnresolvedConnection, Void> PARSER = new ConstructingObjectParser<>(
            "ConnectionParser", true,
            args -> {

@@ -150,13 +150,13 @@ public class Connection {
            PARSER.declareInt(constructorArg(), TARGET);
            PARSER.declareDouble(constructorArg(), WEIGHT);
            PARSER.declareLong(constructorArg(), DOC_COUNT);
-       }
+        }
        static UnresolvedConnection fromXContent(XContentParser parser) throws IOException {
            return PARSER.apply(parser, null);
        }
-   }
+    }

    /**
     * An identifier (implements hashcode and equals) that represents a
     * unique key for a {@link Connection}

@@ -179,9 +179,9 @@ public class Connection {

        ConnectionId vertexId = (ConnectionId) o;

-       if (source != null ? !source.equals(vertexId.source) : vertexId.source != null)
+       if (source != null ? source.equals(vertexId.source) == false : vertexId.source != null)
            return false;
-       if (target != null ? !target.equals(vertexId.target) : vertexId.target != null)
+       if (target != null ? target.equals(vertexId.target) == false : vertexId.target != null)
            return false;

        return true;

@@ -206,5 +206,5 @@ public class Connection {
        public String toString() {
            return getSource() + "->" + getTarget();
        }
-   }
+    }
}
@@ -33,9 +33,9 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
/**
 * A vertex in a graph response represents a single term (a field and value pair)
 * which appears in one or more documents found as part of the graph exploration.
- * 
- * A vertex term could be a bank account number, an email address, a hashtag or any 
- * other term that appears in documents and is interesting to represent in a network.
+ *
+ * A vertex term could be a bank account number, an email address, a hashtag or any
+ * other term that appears in documents and is interesting to represent in a network.
 */
public class Vertex implements ToXContentFragment {

@@ -51,7 +51,7 @@ public class Vertex implements ToXContentFragment {
    private static final ParseField DEPTH = new ParseField("depth");
    private static final ParseField FG = new ParseField("fg");
    private static final ParseField BG = new ParseField("bg");
-   
+

    public Vertex(String field, String term, double weight, int depth, long bg, long fg) {
        super();

@@ -62,12 +62,12 @@ public class Vertex implements ToXContentFragment {
        this.bg = bg;
        this.fg = fg;
    }

    @Override
    public int hashCode() {
        return Objects.hash(field, term, weight, depth, bg, fg);
-   }
+    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)

@@ -83,8 +83,8 @@ public class Vertex implements ToXContentFragment {
            fg == other.fg &&
            Objects.equals(field, other.field) &&
            Objects.equals(term, other.term);
-   }
+    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

@@ -99,8 +99,8 @@ public class Vertex implements ToXContentFragment {
        }
        return builder;
    }
-   
+

    private static final ConstructingObjectParser<Vertex, Void> PARSER = new ConstructingObjectParser<>(
        "VertexParser", true,
        args -> {

@@ -122,12 +122,12 @@ public class Vertex implements ToXContentFragment {
        PARSER.declareInt(constructorArg(), DEPTH);
        PARSER.declareLong(optionalConstructorArg(), BG);
        PARSER.declareLong(optionalConstructorArg(), FG);
-   }
+    }

    static Vertex fromXContent(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }


    /**
     * @return a {@link VertexId} object that uniquely identifies this Vertex

@@ -175,22 +175,22 @@ public class Vertex implements ToXContentFragment {

    /**
     * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default)
-    * this statistic is available. 
-    * @return the number of documents in the index that contain this term (see bg_count in 
+    * this statistic is available.
+    * @return the number of documents in the index that contain this term (see bg_count in
     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html">
-    * the significant_terms aggregation</a>) 
+    * the significant_terms aggregation</a>)
     */
    public long getBg() {
        return bg;
    }

    /**
-    * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default) 
-    * this statistic is available. 
+    * If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default)
+    * this statistic is available.
     * Together with {@link #getBg()} these numbers are used to derive the significance of a term.
-    * @return the number of documents in the sample of best matching documents that contain this term (see fg_count in 
+    * @return the number of documents in the sample of best matching documents that contain this term (see fg_count in
     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html">
-    * the significant_terms aggregation</a>) 
+    * the significant_terms aggregation</a>)
     */
    public long getFg() {
        return fg;

@@ -206,7 +206,7 @@ public class Vertex implements ToXContentFragment {
    public int getHopDepth() {
        return depth;
    }
-   
+
    /**
     * An identifier (implements hashcode and equals) that represents a
     * unique key for a {@link Vertex}

@@ -237,9 +237,9 @@ public class Vertex implements ToXContentFragment {

        VertexId vertexId = (VertexId) o;

-       if (field != null ? !field.equals(vertexId.field) : vertexId.field != null)
+       if (field != null ? field.equals(vertexId.field) == false : vertexId.field != null)
            return false;
-       if (term != null ? !term.equals(vertexId.term) : vertexId.term != null)
+       if (term != null ? term.equals(vertexId.term) == false : vertexId.term != null)
            return false;

        return true;

@@ -256,6 +256,6 @@ public class Vertex implements ToXContentFragment {
        public String toString() {
            return field + ":" + term;
        }
-   }
+    }

}
@@ -63,7 +63,7 @@ public class GetLifecyclePolicyResponse implements ToXContentObject {
        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
        parser.nextToken();

-       while (!parser.isClosed()) {
+       while (parser.isClosed() == false) {
            if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                String policyName = parser.currentName();
                LifecyclePolicyMetadata policyDefinion = LifecyclePolicyMetadata.parse(parser, policyName);
@@ -157,7 +157,7 @@ public class GetFieldMappingsResponse {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof FieldMappingMetadata)) return false;
+       if ((o instanceof FieldMappingMetadata) == false) return false;
        FieldMappingMetadata that = (FieldMappingMetadata) o;
        return Objects.equals(fullName, that.fullName) && Objects.equals(source, that.source);
    }

@@ -177,7 +177,7 @@ public class GetFieldMappingsResponse {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof GetFieldMappingsResponse)) return false;
+       if ((o instanceof GetFieldMappingsResponse) == false) return false;
        GetFieldMappingsResponse that = (GetFieldMappingsResponse) o;
        return Objects.equals(mappings, that.mappings);
    }
@@ -216,7 +216,7 @@ public class GetIndexResponse {
        ensureExpectedToken(Token.START_OBJECT, parser.currentToken(), parser);
        parser.nextToken();

-       while (!parser.isClosed()) {
+       while (parser.isClosed() == false) {
            if (parser.currentToken() == Token.START_OBJECT) {
                // we assume this is an index entry
                String indexName = parser.currentName();
@@ -64,7 +64,7 @@ public class ResizeResponse extends ShardsAcknowledgedResponse {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        ResizeResponse that = (ResizeResponse) o;
        return Objects.equals(index, that.index);
    }
@@ -136,7 +136,7 @@ public final class PutLicenseResponse {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;
        PutLicenseResponse that = (PutLicenseResponse) o;

        return status == that.status &&
@@ -46,7 +46,7 @@ public class GetUsersRequest implements Validatable {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof GetUsersRequest)) return false;
+       if ((o instanceof GetUsersRequest) == false) return false;
        GetUsersRequest that = (GetUsersRequest) o;
        return Objects.equals(usernames, that.usernames);
    }
@@ -84,7 +84,7 @@ public class GetUsersResponse {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof GetUsersResponse)) return false;
+       if ((o instanceof GetUsersResponse) == false) return false;
        GetUsersResponse that = (GetUsersResponse) o;
        return Objects.equals(users, that.users);
    }
@@ -88,7 +88,7 @@ public class CancelTasksRequest implements Validatable {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof CancelTasksRequest)) return false;
+       if ((o instanceof CancelTasksRequest) == false) return false;
        CancelTasksRequest that = (CancelTasksRequest) o;
        return Objects.equals(getNodes(), that.getNodes()) &&
            Objects.equals(getActions(), that.getActions()) &&
@@ -200,7 +200,7 @@ public class ElasticsearchException {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof ElasticsearchException)) return false;
+       if ((o instanceof ElasticsearchException) == false) return false;
        ElasticsearchException that = (ElasticsearchException) o;
        return Objects.equals(getMsg(), that.getMsg()) &&
            Objects.equals(getCause(), that.getCause()) &&
@@ -77,7 +77,7 @@ public class GetTaskRequest implements Validatable {
    @Override
    public Optional<ValidationException> validate() {
        final ValidationException validationException = new ValidationException();
-       if (timeout != null && !waitForCompletion) {
+       if (timeout != null && waitForCompletion == false) {
            validationException.addValidationError("Timeout settings are only accepted if waitForCompletion is also set");
        }
        if (validationException.validationErrors().isEmpty()) {
@@ -110,7 +110,7 @@ public class ListTasksResponse {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof ListTasksResponse)) return false;
+       if ((o instanceof ListTasksResponse) == false) return false;
        ListTasksResponse response = (ListTasksResponse) o;
        return nodesInfoData.equals(response.nodesInfoData) &&
            Objects.equals
@@ -124,7 +124,7 @@ class NodeData {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof NodeData)) return false;
+       if ((o instanceof NodeData) == false) return false;
        NodeData nodeData = (NodeData) o;
        return Objects.equals(getNodeId(), nodeData.getNodeId()) &&
            Objects.equals(getName(), nodeData.getName()) &&
@@ -54,7 +54,7 @@ public class TaskGroup {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof TaskGroup)) return false;
+       if ((o instanceof TaskGroup) == false) return false;
        TaskGroup taskGroup = (TaskGroup) o;
        return Objects.equals(task, taskGroup.task) &&
            Objects.equals(getChildTasks(), taskGroup.getChildTasks());
@@ -78,7 +78,7 @@ public class TaskId {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof TaskId)) return false;
+       if ((o instanceof TaskId) == false) return false;
        TaskId taskId = (TaskId) o;
        return getId() == taskId.getId() &&
            Objects.equals(getNodeId(), taskId.getNodeId());
@@ -153,7 +153,7 @@ public class TaskInfo {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof TaskInfo)) return false;
+       if ((o instanceof TaskInfo) == false) return false;
        TaskInfo taskInfo = (TaskInfo) o;
        return getStartTime() == taskInfo.getStartTime() &&
            getRunningTimeNanos() == taskInfo.getRunningTimeNanos() &&
@@ -63,7 +63,7 @@ public class TaskOperationFailure {
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
-       if (!(o instanceof TaskOperationFailure)) return false;
+       if ((o instanceof TaskOperationFailure) == false) return false;
        TaskOperationFailure that = (TaskOperationFailure) o;
        return getTaskId() == that.getTaskId() &&
            Objects.equals(getNodeId(), that.getNodeId()) &&
@@ -57,7 +57,7 @@ public class AckWatchRequest implements Validatable {
            }
        }

-       if (!exception.validationErrors().isEmpty()) {
+       if (exception.validationErrors().isEmpty() == false) {
            throw exception;
        }
    }
@@ -128,8 +128,9 @@ public class IngestRequestConvertersTests extends ESTestCase {
        Request expectedRequest = IngestRequestConverters.simulatePipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
-       if (pipelineId != null && !pipelineId.isEmpty())
+       if (pipelineId != null && pipelineId.isEmpty() == false) {
            endpoint.add(pipelineId);
+       }
        endpoint.add("_simulate");
        Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        Assert.assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod());
@@ -1509,7 +1509,7 @@ public class RequestConvertersTests extends ESTestCase {
        // Verify that the resulting REST request looks as expected.
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        String joinedIndices = String.join(",", indices);
-       if (!joinedIndices.isEmpty()) {
+       if (joinedIndices.isEmpty() == false) {
            endpoint.add(joinedIndices);
        }
        endpoint.add("_field_caps");

@@ -1550,7 +1550,7 @@ public class RequestConvertersTests extends ESTestCase {
        // Verify that the resulting REST request looks as expected.
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        String joinedIndices = String.join(",", indices);
-       if (!joinedIndices.isEmpty()) {
+       if (joinedIndices.isEmpty() == false) {
            endpoint.add(joinedIndices);
        }
        endpoint.add("_field_caps");
@@ -960,7 +960,7 @@ public class RestHighLevelClientTests extends ESTestCase {
                method.getReturnType().getSimpleName(), equalTo("boolean"));
        } else {
            // It's acceptable for 404s to be represented as empty Optionals
-           if (!method.getReturnType().isAssignableFrom(Optional.class)) {
+           if (method.getReturnType().isAssignableFrom(Optional.class) == false) {
                assertThat("the return type for method [" + method + "] is incorrect",
                    method.getReturnType().getSimpleName(), endsWith("Response"));
            }
@@ -46,7 +46,7 @@ public class DetectionRuleTests extends AbstractXContentTestCase<DetectionRule>
        boolean hasScope = randomBoolean();
        boolean hasConditions = randomBoolean();

-       if (!hasScope && !hasConditions) {
+       if (hasScope == false && hasConditions == false) {
            // at least one of the two should be present
            if (randomBoolean()) {
                hasScope = true;
@@ -73,7 +73,7 @@ public final class PreferHasAttributeNodeSelector implements NodeSelector {

        List<String> values = attributes.get(key);

-       if (values == null || !values.contains(value)) {
+       if (values == null || values.contains(value) == false) {
            nodeIterator.remove();
        }
    }
@@ -29,7 +29,7 @@ public class JavaVersion {
    public static final List<Integer> JAVA_11 = parse("11");

    static List<Integer> parse(final String value) {
-       if (!value.matches("^0*[0-9]+(\\.[0-9]+)*$")) {
+       if (value.matches("^0*[0-9]+(\\.[0-9]+)*$") == false) {
            throw new IllegalArgumentException(value);
        }

@@ -723,7 +723,7 @@ class InstallPluginCommand extends EnvironmentAwareCommand {

            // be on the safe side: do not rely on that directories are always extracted
            // before their children (although this makes sense, but is it guaranteed?)
-           if (!Files.isSymbolicLink(targetFile.getParent())) {
+           if (Files.isSymbolicLink(targetFile.getParent()) == false) {
                Files.createDirectories(targetFile.getParent());
            }
            if (entry.isDirectory() == false) {
@@ -136,7 +136,7 @@ class RemovePluginCommand extends EnvironmentAwareCommand {

        final Path pluginBinDir = env.binFile().resolve(pluginName);
        if (Files.exists(pluginBinDir)) {
-           if (!Files.isDirectory(pluginBinDir)) {
+           if (Files.isDirectory(pluginBinDir) == false) {
                throw new UserException(ExitCodes.IO_ERROR, "bin dir for " + pluginName + " is not a directory");
            }
            try (Stream<Path> paths = Files.list(pluginBinDir)) {
@@ -138,7 +138,7 @@ public class JarHell {
                // Eclipse adds this to the classpath when running unit tests...
                continue;
            }
-           URL url = PathUtils.get(element).toUri().toURL();
+           URL url = PathUtils.get(element).toUri().toURL();
            // junit4.childvm.count
            if (urlElements.add(url) == false && element.endsWith(".jar")) {
                throw new IllegalStateException("jar hell!" + System.lineSeparator() +

@@ -175,7 +175,7 @@ public class JarHell {
                continue;
            }
            if (path.toString().endsWith(".jar")) {
-               if (!seenJars.add(path)) {
+               if (seenJars.add(path) == false) {
                    throw new IllegalStateException("jar hell!" + System.lineSeparator() +
                        "duplicate jar on classpath: " + path);
                }

@@ -233,7 +233,7 @@ public class JarHell {
    }

    public static void checkVersionFormat(String targetVersion) {
-       if (!JavaVersion.isValid(targetVersion)) {
+       if (JavaVersion.isValid(targetVersion) == false) {
            throw new IllegalStateException(
                String.format(
                    Locale.ROOT,
@@ -54,7 +54,7 @@ public class JavaVersion implements Comparable<JavaVersion> {
    public static JavaVersion parse(String value) {
        Objects.requireNonNull(value);
        String prePart = null;
-       if (!isValid(value)) {
+       if (isValid(value) == false) {
            throw new IllegalArgumentException("Java version string [" + value + "] could not be parsed.");
        }
        List<Integer> version = new ArrayList<>();
@@ -79,7 +79,7 @@ public final class Booleans {
        }
        int strLen = str.length();
        for (int i = 0; i < strLen; i++) {
-           if (!Character.isWhitespace(str.charAt(i))) {
+           if (Character.isWhitespace(str.charAt(i)) == false) {
                return true;
            }
        }

@@ -128,7 +128,16 @@ public final class Booleans {
        if (value == null) {
            return defaultValue;
        }
-       return !(value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
+       switch (value) {
+           case "false":
+           case "0":
+           case "off":
+           case "no":
+               return false;
+
+           default:
+               return true;
+       }
    }

    /**

@@ -159,14 +168,14 @@ public final class Booleans {
            return text[offset] != '0';
        }
        if (length == 2) {
-           return !(text[offset] == 'n' && text[offset + 1] == 'o');
+           return (text[offset] == 'n' && text[offset + 1] == 'o') == false;
        }
        if (length == 3) {
-           return !(text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f');
+           return (text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f') == false;
        }
        if (length == 5) {
-           return !(text[offset] == 'f' && text[offset + 1] == 'a' && text[offset + 2] == 'l' && text[offset + 3] == 's' &&
-               text[offset + 4] == 'e');
+           return (text[offset] == 'f' && text[offset + 1] == 'a' && text[offset + 2] == 'l' && text[offset + 3] == 's' &&
+               text[offset + 4] == 'e') == false;
        }
        return true;
    }
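The Booleans hunk above is one of the few places in this diff where the rewrite goes beyond mechanical substitution: the negated disjunction becomes a switch over the four falsy strings. A quick hedged check that the two forms agree on non-null input (an invented test harness, not part of the commit):

class BooleansSwitchCheck {
    static boolean oldForm(String value) {
        return !(value.equals("false") || value.equals("0") || value.equals("off") || value.equals("no"));
    }

    static boolean newForm(String value) {
        switch (value) {
            case "false":
            case "0":
            case "off":
            case "no":
                return false;
            default:
                return true;
        }
    }

    public static void main(String[] args) {
        // Both spellings should agree on every input (null is handled earlier in the real method).
        for (String v : new String[] { "false", "0", "off", "no", "true", "yes", "1" }) {
            System.out.println(v + ": " + (oldForm(v) == newForm(v)));
        }
    }
}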
@@ -48,8 +48,8 @@ public class Tuple<V1, V2> {

        Tuple<?, ?> tuple = (Tuple<?, ?>) o;

-       if (v1 != null ? !v1.equals(tuple.v1) : tuple.v1 != null) return false;
-       if (v2 != null ? !v2.equals(tuple.v2) : tuple.v2 != null) return false;
+       if (v1 != null ? v1.equals(tuple.v1) == false : tuple.v1 != null) return false;
+       if (v2 != null ? v2.equals(tuple.v2) == false : tuple.v2 != null) return false;

        return true;
    }
@@ -200,7 +200,7 @@ public final class IOUtils {
     */
    public static void rm(final Path... locations) throws IOException {
        final LinkedHashMap<Path,Throwable> unremoved = rm(new LinkedHashMap<>(), locations);
-       if (!unremoved.isEmpty()) {
+       if (unremoved.isEmpty() == false) {
            final StringBuilder b = new StringBuilder("could not remove the following files (in the order of attempts):\n");
            for (final Map.Entry<Path,Throwable> kv : unremoved.entrySet()) {
                b.append(" ")
@@ -174,13 +174,13 @@ public final class DissectKey {

    private static Modifier findModifier(String key) {
        Modifier modifier = Modifier.NONE;
-       if (key != null && !key.isEmpty()) {
+       if (key != null && key.isEmpty() == false) {
            Matcher matcher = MODIFIER_PATTERN.matcher(key);
            int matches = 0;
            while (matcher.find()) {
                Modifier priorModifier = modifier;
                modifier = Modifier.fromString(matcher.group());
-               if (++matches > 1 && !(APPEND.equals(priorModifier) && APPEND_WITH_ORDER.equals(modifier))) {
+               if (++matches > 1 && (APPEND.equals(priorModifier) && APPEND_WITH_ORDER.equals(modifier)) == false) {
                    throw new DissectException.KeyParse(key, "multiple modifiers are not allowed.");
                }
            }
@@ -240,9 +240,9 @@ public final class DissectParser {
                if (lookAheadMatches == delimiter.length) {
                    //jump to the end of the match
                    i += lookAheadMatches;
-                   if (!key.skipRightPadding()) {
+                   if (key.skipRightPadding() == false) {
                        //progress the keys/delimiter if possible
-                       if (!it.hasNext()) {
+                       if (it.hasNext() == false) {
                            break; //the while loop
                        }
                        dissectPair = it.next();

@@ -255,7 +255,7 @@ public final class DissectParser {
                    }
                }
                //progress the keys/delimiter if possible
-               if (!it.hasNext()) {
+               if (it.hasNext() == false) {
                    break; //the for loop
                }
                dissectPair = it.next();

@@ -272,7 +272,7 @@ public final class DissectParser {
            }
            //the last key, grab the rest of the input (unless consecutive delimiters already grabbed the last key)
            //and there is no trailing delimiter
-           if (!dissectMatch.fullyMatched() && delimiter.length == 0 ) {
+           if (dissectMatch.fullyMatched() == false && delimiter.length == 0 ) {
                byte[] value = Arrays.copyOfRange(input, valueStart, input.length);
                String valueString = new String(value, StandardCharsets.UTF_8);
                dissectMatch.add(key, valueString);

@@ -280,7 +280,7 @@ public final class DissectParser {
        }
        Map<String, String> results = dissectMatch.getResults();

-       if (!dissectMatch.isValid(results)) {
+       if (dissectMatch.isValid(results) == false) {
            throw new DissectException.FindMatch(pattern, inputString);
        }
        return results;
@@ -326,7 +326,7 @@ public class DissectParserTests extends ESTestCase {
        while (tests.hasNext()) {
            JsonNode test = tests.next();
            boolean skip = test.path("skip").asBoolean();
-           if (!skip) {
+           if (skip == false) {
                String name = test.path("name").asText();
                logger.debug("Running Json specification: " + name);
                String pattern = test.path("tok").asText();
@@ -433,7 +433,7 @@ public class WellKnownText {
    private void closeLinearRingIfCoerced(ArrayList<Double> lats, ArrayList<Double> lons, ArrayList<Double> alts) {
        if (coerce && lats.isEmpty() == false && lons.isEmpty() == false) {
            int last = lats.size() - 1;
-           if (!lats.get(0).equals(lats.get(last)) || !lons.get(0).equals(lons.get(last)) ||
+           if (lats.get(0).equals(lats.get(last)) == false || lons.get(0).equals(lons.get(last)) == false ||
                (alts.isEmpty() == false && !alts.get(0).equals(alts.get(last)))) {
                lons.add(lons.get(0));
                lats.add(lats.get(0));
@@ -194,8 +194,9 @@ final class DerParser {
     * @return A parser for the construct.
     */
    public DerParser getParser() throws IOException {
-       if (!isConstructed())
+       if (isConstructed() == false) {
            throw new IOException("Invalid DER: can't parse primitive entity"); //$NON-NLS-1$
+       }

        return new DerParser(value);
    }
@@ -48,7 +48,7 @@ public abstract class AbstractXContentParser implements XContentParser {
    public static final boolean DEFAULT_NUMBER_COERCE_POLICY = true;

    private static void checkCoerceString(boolean coerce, Class<? extends Number> clazz) {
-       if (!coerce) {
+       if (coerce == false) {
            //Need to throw type IllegalArgumentException as current catch logic in
            //NumberFieldMapper.parseCreateField relies on this for "malformed" value detection
            throw new IllegalArgumentException(clazz.getSimpleName() + " value passed as String");

@@ -68,7 +68,7 @@ public abstract class AbstractXContentParser implements XContentParser {
    // If this behaviour is flagged as undesirable and any truncation occurs
    // then this method is called to trigger the"malformed" handling logic
    void ensureNumberConversion(boolean coerce, long result, Class<? extends Number> clazz) throws IOException {
-       if (!coerce) {
+       if (coerce == false) {
            double fullVal = doDoubleValue();
            if (result != fullVal) {
                // Need to throw type IllegalArgumentException as current catch
@@ -49,7 +49,7 @@ final class MatrixStatsAggregator extends MetricsAggregator {
    MatrixStatsAggregator(String name, Map<String, ValuesSource.Numeric> valuesSources, AggregationContext context,
                          Aggregator parent, MultiValueMode multiValueMode, Map<String,Object> metadata) throws IOException {
        super(name, context, parent, metadata);
-       if (valuesSources != null && !valuesSources.isEmpty()) {
+       if (valuesSources != null && valuesSources.isEmpty() == false) {
            this.valuesSources = new NumericArrayValuesSource(valuesSources, multiValueMode);
            stats = context.bigArrays().newObjectArray(1);
        } else {
@@ -88,7 +88,7 @@ public abstract class ArrayValuesSourceParser<VS extends ValuesSource> implement
                throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
                        "Multi-field aggregations do not support scripts.");
-           } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
+           } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) {
                throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
            }

@@ -103,7 +103,7 @@ public abstract class ArrayValuesSourceParser<VS extends ValuesSource> implement
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "]. " +
                        "Multi-field aggregations do not support scripts.");

-           } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
+           } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) {
                throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
            }

@@ -122,11 +122,11 @@ public abstract class ArrayValuesSourceParser<VS extends ValuesSource> implement
                        "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
                }
            }
-       } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
+       } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) {
            throw new ParsingException(parser.getTokenLocation(),
                "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
        }
-   } else if (!token(aggregationName, currentFieldName, token, parser, otherOptions)) {
+   } else if (token(aggregationName, currentFieldName, token, parser, otherOptions) == false) {
        throw new ParsingException(parser.getTokenLocation(),
            "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
    }
@@ -64,8 +64,9 @@ public class MappingCharFilterFactory extends AbstractCharFilterFactory implemen
    private void parseRules(List<String> rules, NormalizeCharMap.Builder map) {
        for (String rule : rules) {
            Matcher m = rulePattern.matcher(rule);
-           if (!m.find())
+           if (m.find() == false) {
                throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]");
+           }
            String lhs = parseString(m.group(1).trim());
            String rhs = parseString(m.group(2).trim());
            if (lhs == null || rhs == null)
@@ -56,7 +56,7 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
        matchers.put("symbol", CharMatcher.Basic.SYMBOL);
        // Populate with unicode categories from java.lang.Character
        for (Field field : Character.class.getFields()) {
-           if (!field.getName().startsWith("DIRECTIONALITY")
+           if (field.getName().startsWith("DIRECTIONALITY") == false
                && Modifier.isPublic(field.getModifiers())
                && Modifier.isStatic(field.getModifiers())
                && field.getType() == byte.class) {
@@ -39,7 +39,7 @@ public class PatternReplaceCharFilterFactory extends AbstractCharFilterFactory i
        super(indexSettings, name);

        String sPattern = settings.get("pattern");
-       if (!Strings.hasLength(sPattern)) {
+       if (Strings.hasLength(sPattern) == false) {
            throw new IllegalArgumentException("pattern is missing for [" + name + "] char filter of type 'pattern_replace'");
        }
        pattern = Regex.compile(sPattern, settings.get("flags"));
@@ -71,7 +71,7 @@ class UniqueTokenFilter extends TokenFilter {
            System.arraycopy(term, 0, saved, 0, length);
            previous.add(saved);

-           if (!duplicate) {
+           if (duplicate == false) {
                return true;
            }
        }
@@ -126,8 +126,9 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
        SortedMap<Character, Byte> typeMap = new TreeMap<>();
        for (String rule : rules) {
            Matcher m = typePattern.matcher(rule);
-           if (!m.find())
+           if (m.find() == false) {
                throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]");
+           }
            String lhs = parseString(m.group(1).trim());
            Byte rhs = parseType(m.group(2).trim());
            if (lhs.length() != 1)
@@ -102,7 +102,7 @@ public final class ConvertProcessor extends AbstractProcessor {
    }, AUTO {
        @Override
        public Object convert(Object value) {
-           if (!(value instanceof String)) {
+           if ((value instanceof String) == false) {
                return value;
            }
            try {
@@ -238,7 +238,7 @@ final class UserAgentParser {
        String name = null, major = null, minor = null, patch = null, build = null;
        Matcher matcher = pattern.matcher(agentString);

-       if (!matcher.find()) {
+       if (matcher.find() == false) {
            return null;
        }

@@ -75,7 +75,7 @@ class DateMethodValueSource extends FieldDataValueSource {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;

        DateMethodValueSource that = (DateMethodValueSource) o;

@@ -77,7 +77,7 @@ class DateObjectValueSource extends FieldDataValueSource {
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
-       if (!super.equals(o)) return false;
+       if (super.equals(o) == false) return false;

        DateObjectValueSource that = (DateObjectValueSource) o;
        return methodName.equals(that.methodName);
@@ -445,7 +445,7 @@ public class ExpressionScriptEngine implements ScriptEngine {
                    dateAccessor = true;
                }
            }
-           if (!dateAccessor) {
+           if (dateAccessor == false) {
                throw new IllegalArgumentException(
                    "Variable [" + variable + "] does not follow an allowed format of either doc['field'] or doc['field'].method()"
                );
@@ -49,7 +49,7 @@ class FieldDataValueSource extends FieldDataBasedDoubleValuesSource {

        FieldDataValueSource that = (FieldDataValueSource) o;

-       if (!fieldData.equals(that.fieldData)) return false;
+       if (fieldData.equals(that.fieldData) == false) return false;
        return multiValueMode == that.multiValueMode;

    }
@@ -64,7 +64,7 @@ final class GeoLongitudeValueSource extends FieldDataBasedDoubleValuesSource {
        if (obj == null) return false;
        if (getClass() != obj.getClass()) return false;
        GeoLongitudeValueSource other = (GeoLongitudeValueSource) obj;
-       if (!fieldData.equals(other.fieldData)) return false;
+       if (fieldData.equals(other.fieldData) == false) return false;
        return true;
    }

@@ -93,7 +93,7 @@ public final class Def {
    static int getArrayLength(final Object[] array) { return array.length; }

    static MethodHandle arrayLengthGetter(Class<?> arrayType) {
-       if (!arrayType.isArray()) {
+       if (arrayType.isArray() == false) {
            throw new IllegalArgumentException("type must be an array");
        }
        return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?

@@ -622,7 +622,7 @@ public final class Def {
    }

    static MethodHandle newIterator(Class<?> arrayType) {
-       if (!arrayType.isArray()) {
+       if (arrayType.isArray() == false) {
            throw new IllegalArgumentException("type must be an array");
        }
        return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?

@@ -1269,7 +1269,7 @@ public final class Def {
    static int normalizeIndex(final Object[] array, final int index) { return index >= 0 ? index : index + array.length; }

    static MethodHandle arrayIndexNormalizer(Class<?> arrayType) {
-       if (!arrayType.isArray()) {
+       if (arrayType.isArray() == false) {
            throw new IllegalArgumentException("type must be an array");
        }
        return (ARRAY_TYPE_MH_MAPPING.containsKey(arrayType)) ?
@@ -244,7 +244,7 @@ public final class MethodWriter extends GeneratorAdapter {
        if (from != boolean.class && from.isPrimitive() && to != boolean.class && to.isPrimitive()) {
            cast(getType(from), getType(to));
        } else {
-           if (!to.isAssignableFrom(from)) {
+           if (to.isAssignableFrom(from) == false) {
                checkCast(getType(to));
            }
        }

@@ -357,7 +357,7 @@ public final class MethodWriter extends GeneratorAdapter {
        // so we don't need a special NPE guard.
        // otherwise, we need to allow nulls for possible string concatenation.
        boolean hasPrimitiveArg = lhs.isPrimitive() || rhs.isPrimitive();
-       if (!hasPrimitiveArg) {
+       if (hasPrimitiveArg == false) {
            flags |= DefBootstrap.OPERATOR_ALLOWS_NULL;
        }
        invokeDefCall("add", methodType, DefBootstrap.BINARY_OPERATOR, flags);

@@ -466,7 +466,7 @@ public final class MethodWriter extends GeneratorAdapter {

    @Override
    public void endMethod() {
-       if (stringConcatArgs != null && !stringConcatArgs.isEmpty()) {
+       if (stringConcatArgs != null && stringConcatArgs.isEmpty() == false) {
            throw new IllegalStateException("String concat bytecode not completed.");
        }
        super.endMethod();
@@ -89,7 +89,7 @@ public interface PainlessScript {
                }
                break;
            // but filter our own internal stacks (e.g. indy bootstrap)
-           } else if (!shouldFilter(element)) {
+           } else if (shouldFilter(element) == false) {
                scriptStack.add(element.toString());
            }
        }
@@ -459,7 +459,7 @@ public final class PainlessScriptEngine implements ScriptEngine {
            throw new IllegalArgumentException("[painless.regex.limit-factor] can only be set on node startup.");
        }

-       if (!copy.isEmpty()) {
+       if (copy.isEmpty() == false) {
            throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
        }
    }
@@ -1348,7 +1348,7 @@ public class DefaultIRTreeToASMBytesPhase implements IRTreeVisitor<WriteScope> {
        PainlessMethod getterPainlessMethod = irDotSubShortcutNode.getDecorationValue(IRDMethod.class);
        methodWriter.invokeMethodCall(getterPainlessMethod);

-       if (!getterPainlessMethod.returnType.equals(getterPainlessMethod.javaMethod.getReturnType())) {
+       if (getterPainlessMethod.returnType.equals(getterPainlessMethod.javaMethod.getReturnType()) == false) {
            methodWriter.checkCast(MethodWriter.getType(getterPainlessMethod.returnType));
        }
    }
@@ -294,7 +294,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
                String childId = "c" + i;
                builders.add(createIndexRequest("test", "child", childId, previousParentId, "c_field", childId));

-               if (!parentToChildren.containsKey(previousParentId)) {
+               if (parentToChildren.containsKey(previousParentId) == false) {
                    parentToChildren.put(previousParentId, new HashSet<>());
                }
                assertThat(parentToChildren.get(previousParentId).add(childId), is(true));
@@ -406,9 +406,9 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil

        if (minChildren != that.minChildren) return false;
        if (maxChildren != that.maxChildren) return false;
-       if (!toQuery.equals(that.toQuery)) return false;
-       if (!innerQuery.equals(that.innerQuery)) return false;
-       if (!joinField.equals(that.joinField)) return false;
+       if (toQuery.equals(that.toQuery) == false) return false;
+       if (innerQuery.equals(that.innerQuery) == false) return false;
+       if (joinField.equals(that.joinField) == false) return false;
        return scoreMode == that.scoreMode;
    }

@@ -455,7 +455,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
            throw new QueryShardException(context, "field [" + field + "] does not exist");
        }

-       if (!(fieldType instanceof PercolatorFieldMapper.PercolatorFieldType)) {
+       if ((fieldType instanceof PercolatorFieldMapper.PercolatorFieldType) == false) {
            throw new QueryShardException(context, "expected field [" + field +
                "] to be of type [percolator], but is of type [" + fieldType.typeName() + "]");
        }
@@ -122,7 +122,7 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
        this.task = task;
        this.scriptService = scriptService;
        this.sslConfig = sslConfig;
-       if (!task.isWorker()) {
+       if (task.isWorker() == false) {
            throw new IllegalArgumentException("Given task [" + task.getId() + "] must have a child worker");
        }
        this.worker = task.getWorkerState();
@@ -52,7 +52,7 @@ public class Netty4TransportPublishAddressIT extends ESNetty4IntegTestCase {
    }

    public void testDifferentPorts() throws Exception {
-       if (!NetworkUtils.SUPPORTS_V6) {
+       if (NetworkUtils.SUPPORTS_V6 == false) {
            return;
        }
        logger.info("--> starting a node on ipv4 only");
@@ -162,7 +162,7 @@ public class Netty4HttpRequest implements HttpRequest {
        String cookieString = request.headers().get(HttpHeaderNames.COOKIE);
        if (cookieString != null) {
            Set<Cookie> cookies = ServerCookieDecoder.STRICT.decode(cookieString);
-           if (!cookies.isEmpty()) {
+           if (cookies.isEmpty() == false) {
                return ServerCookieEncoder.STRICT.encode(cookies);
            }
        }
@@ -49,7 +49,7 @@ public class Netty4Utils {
    public static void setAvailableProcessors(final int availableProcessors) {
        // we set this to false in tests to avoid tests that randomly set processors from stepping on each other
        final boolean set = Booleans.parseBoolean(System.getProperty("es.set.netty.runtime.available.processors", "true"));
-       if (!set) {
+       if (set == false) {
            return;
        }

@@ -75,11 +75,11 @@ public class Netty4HttpPipeliningHandlerTests extends ESTestCase {
}

private void shutdownExecutorService() throws InterruptedException {
if (!handlerService.isShutdown()) {
if (handlerService.isShutdown() == false) {
handlerService.shutdown();
handlerService.awaitTermination(10, TimeUnit.SECONDS);
}
if (!eventLoopService.isShutdown()) {
if (eventLoopService.isShutdown() == false) {
eventLoopService.shutdown();
eventLoopService.awaitTermination(10, TimeUnit.SECONDS);
}

@@ -39,7 +39,7 @@ public class IcuAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer>
super(indexSettings, name, settings);
String method = settings.get("method", "nfkc_cf");
String mode = settings.get("mode", "compose");
if (!"compose".equals(mode) && !"decompose".equals(mode)) {
if ("compose".equals(mode) == false && "decompose".equals(mode) == false) {
throw new IllegalArgumentException("Unknown mode [" + mode + "] in analyzer [" + name +
"], expected one of [compose, decompose]");
}

@@ -44,7 +44,7 @@ public class IcuNormalizerCharFilterFactory extends AbstractCharFilterFactory im
super(indexSettings, name);
String method = settings.get("name", "nfkc_cf");
String mode = settings.get("mode");
if (!"compose".equals(mode) && !"decompose".equals(mode)) {
if ("compose".equals(mode) == false && "decompose".equals(mode) == false) {
mode = "compose";
}
Normalizer2 normalizer = Normalizer2.getInstance(

@@ -19,6 +19,9 @@

package org.elasticsearch.index.analysis.phonetic;

import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.StringEncoder;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;

@@ -28,9 +31,6 @@ import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.codec.EncoderException;
import org.apache.commons.codec.StringEncoder;

/**
 * Kölner Phonetik
 *

@@ -141,14 +141,14 @@ public class KoelnerPhonetik implements StringEncoder {
String primaryForm = str;
List<String> parts = new ArrayList<>();
parts.add(primaryForm.replaceAll("[^\\p{L}\\p{N}]", ""));
if (!primary) {
if (primary == false) {
List<String> tmpParts = new ArrayList<>(Arrays.asList(str.split("[\\p{Z}\\p{C}\\p{P}]")));
int numberOfParts = tmpParts.size();
while (tmpParts.size() > 0) {
StringBuilder part = new StringBuilder();
for (int i = 0; i < tmpParts.size(); i++) {
part.append(tmpParts.get(i));
if (!(i + 1 == numberOfParts)) {
if ((i + 1 == numberOfParts) == false) {
parts.add(part.toString());
}
}

@@ -48,7 +48,7 @@ import java.util.ArrayList;
import java.util.List;

public class AzureSeedHostsProvider implements SeedHostsProvider {

private static final Logger logger = LogManager.getLogger(AzureSeedHostsProvider.class);

public enum HostType {

@@ -182,7 +182,7 @@ public class AzureSeedHostsProvider implements SeedHostsProvider {
}

// If provided, we check the deployment name
if (Strings.hasLength(deploymentName) && !deploymentName.equals(deployment.getName())) {
if (Strings.hasLength(deploymentName) && deploymentName.equals(deployment.getName()) == false) {
logger.debug("current deployment name [{}] different from [{}]. skipping...",
deployment.getName(), deploymentName);
continue;

@@ -121,7 +121,7 @@ class AwsEc2SeedHostsProvider implements SeedHostsProvider {
for (final Reservation reservation : descInstances.getReservations()) {
for (final Instance instance : reservation.getInstances()) {
// lets see if we can filter based on groups
if (!groups.isEmpty()) {
if (groups.isEmpty() == false) {
final List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
final List<String> securityGroupNames = new ArrayList<>(instanceSecurityGroups.size());
final List<String> securityGroupIds = new ArrayList<>(instanceSecurityGroups.size());

|
@ -140,7 +140,7 @@ class AwsEc2SeedHostsProvider implements SeedHostsProvider {
|
|||
}
|
||||
} else {
|
||||
// We need tp match all group names or group ids, otherwise we ignore this instance
|
||||
if (!(securityGroupNames.containsAll(groups) || securityGroupIds.containsAll(groups))) {
|
||||
if ((securityGroupNames.containsAll(groups) || securityGroupIds.containsAll(groups)) == false) {
|
||||
logger.trace("filtering out instance {} based on groups {}, does not include all of {}",
|
||||
instance.getInstanceId(), instanceSecurityGroups, groups);
|
||||
// continue to the next instance
|
||||
|
@@ -209,7 +209,7 @@ class AwsEc2SeedHostsProvider implements SeedHostsProvider {
);
}

if (!availabilityZones.isEmpty()) {
if (availabilityZones.isEmpty() == false) {
// OR relationship amongst multiple values of the availability-zone filter
describeInstancesRequest.withFilters(
new Filter("availability-zone").withValues(availabilityZones)

@@ -49,7 +49,7 @@ import java.util.function.Function;
import static java.util.Collections.emptyList;

public class GceSeedHostsProvider implements SeedHostsProvider {

private static final Logger logger = LogManager.getLogger(GceSeedHostsProvider.class);

/**

@@ -171,7 +171,7 @@ public class GceSeedHostsProvider implements SeedHostsProvider {
break;
}
}
if (!found) {
if (found == false) {
filterByTag = true;
break;
}

@@ -58,7 +58,7 @@ public class AzureBlobContainer extends AbstractBlobContainer {

private InputStream openInputStream(String blobName, long position, @Nullable Long length) throws IOException {
logger.trace("readBlob({}) from position [{}] with length [{}]", blobName, position, length != null ? length : "unlimited");
if (blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY && !blobExists(blobName)) {
if (blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY && blobExists(blobName) == false) {
// On Azure, if the location path is a secondary location, and the blob does not
// exist, instead of returning immediately from the getInputStream call below
// with a 404 StorageException, Azure keeps trying and trying for a long timeout

@@ -64,7 +64,7 @@ final class GoogleCloudStorageHttpStatsCollector implements HttpResponseIntercep
@Override
public void interceptResponse(final HttpResponse response) {
// TODO keep track of unsuccessful requests in different entries
if (!response.isSuccessStatusCode())
if (response.isSuccessStatusCode() == false)
return;

final HttpRequest request = response.getRequest();

@@ -50,7 +50,7 @@ final class HdfsBlobStore implements BlobStore {
this.bufferSize = bufferSize;
this.root = execute(fileContext1 -> fileContext1.makeQualified(new Path(path)));
this.readOnly = readOnly;
if (!readOnly) {
if (readOnly == false) {
try {
mkdirs(root);
} catch (FileAlreadyExistsException ok) {

@@ -78,7 +78,7 @@ final class HdfsBlobStore implements BlobStore {

private Path buildHdfsPath(BlobPath blobPath) {
final Path path = translateToHdfsPath(blobPath);
if (!readOnly) {
if (readOnly == false) {
try {
mkdirs(path);
} catch (FileAlreadyExistsException ok) {

@@ -161,7 +161,7 @@ public class NioHttpRequest implements HttpRequest {
String cookieString = request.headers().get(HttpHeaderNames.COOKIE);
if (cookieString != null) {
Set<Cookie> cookies = ServerCookieDecoder.STRICT.decode(cookieString);
if (!cookies.isEmpty()) {
if (cookies.isEmpty() == false) {
return ServerCookieEncoder.STRICT.encode(cookies);
}
}

@@ -75,11 +75,11 @@ public class NioHttpPipeliningHandlerTests extends ESTestCase {
}

private void shutdownExecutorService() throws InterruptedException {
if (!handlerService.isShutdown()) {
if (handlerService.isShutdown() == false) {
handlerService.shutdown();
handlerService.awaitTermination(10, TimeUnit.SECONDS);
}
if (!eventLoopService.isShutdown()) {
if (eventLoopService.isShutdown() == false) {
eventLoopService.shutdown();
eventLoopService.awaitTermination(10, TimeUnit.SECONDS);
}

@@ -37,7 +37,7 @@ public class SystemCallFilterTests extends ESTestCase {
// otherwise, since we don't have TSYNC support, rules are not applied to the test thread
// (randomizedrunner class initialization happens in its own thread, after the test thread is created)
// instead we just forcefully run it for the test thread here.
if (!JNANatives.LOCAL_SYSTEM_CALL_FILTER_ALL) {
if (JNANatives.LOCAL_SYSTEM_CALL_FILTER_ALL == false) {
try {
SystemCallFilter.init(createTempDir());
} catch (Exception e) {

@@ -126,7 +126,7 @@ public class DelayedShardAggregationBuilder extends AbstractAggregationBuilder<D
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
if (super.equals(o) == false) return false;
DelayedShardAggregationBuilder that = (DelayedShardAggregationBuilder) o;
return Objects.equals(delay, that.delay);
}

@@ -101,7 +101,7 @@ public class MockBigArrays extends BigArrays {

public static void ensureAllArraysAreReleased() throws Exception {
final Map<Object, Object> masterCopy = new HashMap<>(ACQUIRED_ARRAYS);
if (!masterCopy.isEmpty()) {
if (masterCopy.isEmpty() == false) {
// not empty, we might be executing on a shared cluster that keeps on obtaining
// and releasing arrays, lets make sure that after a reasonable timeout, all master
// copy (snapshot) have been released

@@ -110,7 +110,7 @@ public class MockBigArrays extends BigArrays {
} catch (AssertionError ex) {
masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet());
ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
if (!masterCopy.isEmpty()) {
if (masterCopy.isEmpty() == false) {
Iterator<Object> causes = masterCopy.values().iterator();
Object firstCause = causes.next();
RuntimeException exception = new RuntimeException(masterCopy.size() + " arrays have not been released",

@@ -170,7 +170,7 @@ public class MockBigArrays extends BigArrays {
@Override
public ByteArray newByteArray(long size, boolean clearOnResize) {
final ByteArrayWrapper array = new ByteArrayWrapper(super.newByteArray(size, clearOnResize), clearOnResize);
if (!clearOnResize) {
if (clearOnResize == false) {
array.randomizeContent(0, size);
}
return array;

@@ -187,7 +187,7 @@ public class MockBigArrays extends BigArrays {
} else {
arr = new ByteArrayWrapper(array, arr.clearOnResize);
}
if (!arr.clearOnResize) {
if (arr.clearOnResize == false) {
arr.randomizeContent(originalSize, size);
}
return arr;

@@ -196,7 +196,7 @@ public class MockBigArrays extends BigArrays {
@Override
public IntArray newIntArray(long size, boolean clearOnResize) {
final IntArrayWrapper array = new IntArrayWrapper(super.newIntArray(size, clearOnResize), clearOnResize);
if (!clearOnResize) {
if (clearOnResize == false) {
array.randomizeContent(0, size);
}
return array;

@@ -213,7 +213,7 @@ public class MockBigArrays extends BigArrays {
} else {
arr = new IntArrayWrapper(array, arr.clearOnResize);
}
if (!arr.clearOnResize) {
if (arr.clearOnResize == false) {
arr.randomizeContent(originalSize, size);
}
return arr;

@@ -222,7 +222,7 @@ public class MockBigArrays extends BigArrays {
@Override
public LongArray newLongArray(long size, boolean clearOnResize) {
final LongArrayWrapper array = new LongArrayWrapper(super.newLongArray(size, clearOnResize), clearOnResize);
if (!clearOnResize) {
if (clearOnResize == false) {
array.randomizeContent(0, size);
}
return array;

@@ -239,7 +239,7 @@ public class MockBigArrays extends BigArrays {
} else {
arr = new LongArrayWrapper(array, arr.clearOnResize);
}
if (!arr.clearOnResize) {
if (arr.clearOnResize == false) {
arr.randomizeContent(originalSize, size);
}
return arr;

@@ -248,7 +248,7 @@ public class MockBigArrays extends BigArrays {
@Override
public FloatArray newFloatArray(long size, boolean clearOnResize) {
final FloatArrayWrapper array = new FloatArrayWrapper(super.newFloatArray(size, clearOnResize), clearOnResize);
if (!clearOnResize) {
if (clearOnResize == false) {
array.randomizeContent(0, size);
}
return array;

@@ -265,7 +265,7 @@ public class MockBigArrays extends BigArrays {
} else {
arr = new FloatArrayWrapper(array, arr.clearOnResize);
}
if (!arr.clearOnResize) {
if (arr.clearOnResize == false) {
arr.randomizeContent(originalSize, size);
}
return arr;

@@ -274,7 +274,7 @@ public class MockBigArrays extends BigArrays {
@Override
public DoubleArray newDoubleArray(long size, boolean clearOnResize) {
final DoubleArrayWrapper array = new DoubleArrayWrapper(super.newDoubleArray(size, clearOnResize), clearOnResize);
if (!clearOnResize) {
if (clearOnResize == false) {
array.randomizeContent(0, size);
}
return array;

@@ -291,7 +291,7 @@ public class MockBigArrays extends BigArrays {
} else {
arr = new DoubleArrayWrapper(array, arr.clearOnResize);
}
if (!arr.clearOnResize) {
if (arr.clearOnResize == false) {
arr.randomizeContent(originalSize, size);
}
return arr;

@@ -41,16 +41,16 @@ public class MockPageCacheRecycler extends PageCacheRecycler {

public static void ensureAllPagesAreReleased() throws Exception {
final Map<Object, Throwable> masterCopy = new HashMap<>(ACQUIRED_PAGES);
if (!masterCopy.isEmpty()) {
if (masterCopy.isEmpty() == false) {
// not empty, we might be executing on a shared cluster that keeps on obtaining
// and releasing pages, lets make sure that after a reasonable timeout, all master
// copy (snapshot) have been released
final boolean success =
waitUntil(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()));
if (!success) {
if (success == false) {
masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet());
ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
if (!masterCopy.isEmpty()) {
if (masterCopy.isEmpty() == false) {
Iterator<Throwable> causes = masterCopy.values().iterator();
Throwable firstCause = causes.next();
RuntimeException exception = new RuntimeException(masterCopy.size() + " pages have not been released", firstCause);

@@ -120,7 +120,7 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
@Override
public V<byte[]> bytePage(boolean clear) {
final V<byte[]> page = super.bytePage(clear);
if (!clear) {
if (clear == false) {
Arrays.fill(page.v(), 0, page.v().length, (byte)random.nextInt(1<<8));
}
return wrap(page);

@@ -129,7 +129,7 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
@Override
public V<int[]> intPage(boolean clear) {
final V<int[]> page = super.intPage(clear);
if (!clear) {
if (clear == false) {
Arrays.fill(page.v(), 0, page.v().length, random.nextInt());
}
return wrap(page);

@@ -138,7 +138,7 @@ public class MockPageCacheRecycler extends PageCacheRecycler {
@Override
public V<long[]> longPage(boolean clear) {
final V<long[]> page = super.longPage(clear);
if (!clear) {
if (clear == false) {
Arrays.fill(page.v(), 0, page.v().length, random.nextLong());
}
return wrap(page);

@@ -134,13 +134,13 @@ public class BackgroundIndexer implements AutoCloseable {
try {
startLatch.await();
logger.info("**** starting indexing thread {}", indexerId);
while (!stop.get()) {
while (stop.get() == false) {
if (batch) {
int batchSize = threadRandom.nextInt(20) + 1;
if (hasBudget.get()) {
// always try to get at least one
batchSize = Math.max(Math.min(batchSize, availableBudget.availablePermits()), 1);
if (!availableBudget.tryAcquire(batchSize, 250, TimeUnit.MILLISECONDS)) {
if (availableBudget.tryAcquire(batchSize, 250, TimeUnit.MILLISECONDS) == false) {
// time out -> check if we have to stop.
continue;
}

@@ -173,7 +173,7 @@ public class BackgroundIndexer implements AutoCloseable {
}
} else {

if (hasBudget.get() && !availableBudget.tryAcquire(250, TimeUnit.MILLISECONDS)) {
if (hasBudget.get() && availableBudget.tryAcquire(250, TimeUnit.MILLISECONDS) == false) {
// time out -> check if we have to stop.
continue;
}

@@ -278,7 +278,7 @@ public class BackgroundIndexer implements AutoCloseable {
* @param numOfDocs number of document to index before pausing. Set to -1 to have no limit.
*/
public void start(int numOfDocs) {
assert !stop.get() : "background indexer can not be started after it has stopped";
assert stop.get() == false : "background indexer can not be started after it has stopped";
setBudget(numOfDocs);
startLatch.countDown();
}

@@ -517,7 +517,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
}

private static void clearClusters() throws Exception {
if (!clusters.isEmpty()) {
if (clusters.isEmpty() == false) {
IOUtils.close(clusters.values());
clusters.clear();
}

@@ -568,7 +568,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
}
success = true;
} finally {
if (!success) {
if (success == false) {
// if we failed here that means that something broke horribly so we should clear all clusters
// TODO: just let the exception happen, WTF is all this horseshit
// afterTestRule.forceFailure();

@@ -596,7 +596,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
}

public static InternalTestCluster internalCluster() {
if (!isInternalCluster()) {
if (isInternalCluster() == false) {
throw new UnsupportedOperationException("current test cluster is immutable");
}
return (InternalTestCluster) currentCluster;

@@ -727,7 +727,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
created.add(name);
success = true;
} finally {
if (!success && !created.isEmpty()) {
if (success == false && created.isEmpty() == false) {
cluster().wipeIndices(created.toArray(new String[created.size()]));
}
}

@@ -841,7 +841,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
getExcludeSettings(n, builder);
}
Settings build = builder.build();
if (!build.isEmpty()) {
if (build.isEmpty() == false) {
logger.debug("allowNodes: updating [{}]'s setting to [{}]", index, build.toDelimitedString(';'));
client().admin().indices().prepareUpdateSettings(index).setSettings(build).execute().actionGet();
}

@@ -1427,7 +1427,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
indices.add(builder.request().index());
}
Set<List<String>> bogusIds = new HashSet<>(); // (index, type, id)
if (random.nextBoolean() && !builders.isEmpty() && dummyDocuments) {
if (random.nextBoolean() && builders.isEmpty() == false && dummyDocuments) {
builders = new ArrayList<>(builders);
// inject some bogus docs
final int numBogusDocs = scaledRandomIntBetween(1, builders.size() * 2);

@@ -1489,7 +1489,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
}
}
assertThat(actualErrors, emptyIterable());
if (!bogusIds.isEmpty()) {
if (bogusIds.isEmpty() == false) {
// delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs!
for (List<String> doc : bogusIds) {
assertEquals("failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(1) + "]",

@@ -2121,7 +2121,7 @@ public abstract class ESIntegTestCase extends ESTestCase {
INSTANCE.setupSuiteScopeCluster();
success = true;
} finally {
if (!success) {
if (success == false) {
afterClass();
}
}

@@ -540,7 +540,7 @@ public final class InternalTestCluster extends TestCluster {
}

private void ensureOpen() {
if (!open.get()) {
if (open.get() == false) {
throw new RuntimeException("Cluster is already closed");
}
}

@@ -623,7 +623,7 @@ public final class InternalTestCluster extends TestCluster {
}

stopNodesAndClients(nodesToRemove);
if (!nodesToRemove.isEmpty() && size() > 0) {
if (nodesToRemove.isEmpty() == false && size() > 0) {
validateClusterFormed();
}
}

@@ -1394,7 +1394,7 @@ public final class InternalTestCluster extends TestCluster {
}

public synchronized void wipePendingDataDirectories() {
if (!dataDirToClean.isEmpty()) {
if (dataDirToClean.isEmpty() == false) {
try {
for (Path path : dataDirToClean) {
try {

@@ -2063,7 +2063,7 @@ public final class InternalTestCluster extends TestCluster {
}

private synchronized void publishNode(NodeAndClient nodeAndClient) {
assert !nodeAndClient.node().isClosed();
assert nodeAndClient.node().isClosed() == false;
final NavigableMap<String, NodeAndClient> newNodes = new TreeMap<>(nodes);
newNodes.put(nodeAndClient.name, nodeAndClient);
nodes = Collections.unmodifiableNavigableMap(newNodes);

@@ -148,7 +148,7 @@ public abstract class TestCluster implements Closeable {
for (IndexMetadata indexMetadata : clusterStateResponse.getState().metadata()) {
concreteIndices.add(indexMetadata.getIndex().getName());
}
if (!concreteIndices.isEmpty()) {
if (concreteIndices.isEmpty() == false) {
assertAcked(client().admin().indices().prepareDelete(concreteIndices.toArray(String.class)));
}
}

@@ -45,11 +45,15 @@ public abstract class TestCustomMetadata extends AbstractNamedDiffable<Metadata.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (o == null || getClass() != o.getClass()) {
return false;
}

TestCustomMetadata that = (TestCustomMetadata) o;

if (!data.equals(that.data)) return false;
if (data.equals(that.data) == false) {
return false;
}

return true;
}
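
Every hunk in this commit applies the same mechanical rewrite: a negated condition such as "if (!expr)" becomes "if (expr == false)". As a rough, hypothetical illustration of how remaining violations could be located before the Checkstyle rule is switched on, the Java sketch below scans a source tree for the negation pattern. It is not the project's actual Checkstyle check; the class name NegationFinder is invented for this example, and the regex is deliberately narrow (it only catches a "!" that directly follows the opening parenthesis of an "if").

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.regex.Pattern;
import java.util.stream.Stream;

// Hypothetical helper, not the real Checkstyle rule: lists lines that still
// use "if (!...)" instead of the explicit "expression == false" convention.
public final class NegationFinder {

    // Matches a '!' directly inside an if condition, e.g. "if (!foo)" or
    // "if (!(a || b))". The lookahead excludes "!=" comparisons; negations
    // deeper inside the condition ("a && !b") are intentionally not matched.
    private static final Pattern NEGATED_IF = Pattern.compile("if\\s*\\(\\s*!(?!=)");

    public static void main(String[] args) throws IOException {
        Path root = Path.of(args.length > 0 ? args[0] : ".");
        try (Stream<Path> files = Files.walk(root)) {
            files.filter(p -> p.toString().endsWith(".java")).forEach(NegationFinder::report);
        }
    }

    private static void report(Path file) {
        try {
            List<String> lines = Files.readAllLines(file);
            for (int i = 0; i < lines.size(); i++) {
                if (NEGATED_IF.matcher(lines.get(i)).find()) {
                    // Print file:line so the hit can be opened directly from a terminal.
                    System.out.println(file + ":" + (i + 1) + ": " + lines.get(i).trim());
                }
            }
        } catch (IOException e) {
            System.err.println("could not read " + file + ": " + e.getMessage());
        }
    }
}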
Some files were not shown because too many files have changed in this diff.