Fix shadowed vars pt4 (#80842)

Part of #19752. Fix more instances where local variable names were shadowing field names.
Rory Hunter 2021-11-18 19:59:33 +00:00 committed by GitHub
parent 89bbac9216
commit 754393d352
34 changed files with 187 additions and 183 deletions
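The pattern throughout the diff below is mechanical: wherever a local variable, parameter, or lambda argument reused the name of a field, it is renamed or removed so that the field is no longer hidden, and a handful of classes opt out with @SuppressWarnings("HiddenField") instead. A minimal sketch of the two common variants (class and member names are hypothetical, not taken from this commit):

class ShadowingExample {
    private int count;

    // Before: the local redeclaration hides the field, so this.count is
    // never assigned. This is the bug class the HiddenField check flags.
    void initBefore() {
        int count = compute();
    }

    // Variant 1: drop the redeclaration and assign the field directly,
    // as in the processor factory tests below.
    void initAfter() {
        count = compute();
    }

    // Variant 2: give the parameter a distinct name so it cannot hide
    // the field, as in CircleBuilder.radius and most renames below.
    void setCount(int countValue) {
        this.count = countValue;
    }

    private int compute() {
        return 42;
    }
}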

@@ -84,7 +84,7 @@ public class AppendProcessorFactoryTests extends ESTestCase {
}
public void testInvalidMustacheTemplate() throws Exception {
-AppendProcessor.Factory factory = new AppendProcessor.Factory(TestTemplateService.instance(true));
+factory = new AppendProcessor.Factory(TestTemplateService.instance(true));
Map<String, Object> config = new HashMap<>();
config.put("field", "{{field1}}");
config.put("value", "value1");

@@ -49,7 +49,7 @@ public class FailProcessorFactoryTests extends ESTestCase {
}
public void testInvalidMustacheTemplate() throws Exception {
-FailProcessor.Factory factory = new FailProcessor.Factory(TestTemplateService.instance(true));
+factory = new FailProcessor.Factory(TestTemplateService.instance(true));
Map<String, Object> config = new HashMap<>();
config.put("message", "{{error}}");
String processorTag = randomAlphaOfLength(10);

@@ -66,7 +66,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase {
}
public void testInvalidMustacheTemplate() throws Exception {
-RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true));
+factory = new RemoveProcessor.Factory(TestTemplateService.instance(true));
Map<String, Object> config = new HashMap<>();
config.put("field", "{{field1}}");
String processorTag = randomAlphaOfLength(10);

@@ -93,7 +93,7 @@ public class SetProcessorFactoryTests extends ESTestCase {
}
public void testInvalidMustacheTemplate() throws Exception {
-SetProcessor.Factory factory = new SetProcessor.Factory(TestTemplateService.instance(true));
+factory = new SetProcessor.Factory(TestTemplateService.instance(true));
Map<String, Object> config = new HashMap<>();
config.put("field", "{{field1}}");
config.put("value", "value1");

@@ -285,6 +285,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}
+@SuppressWarnings("HiddenField")
public void testLazyLoading() throws Exception {
final Path configDir = createTempDir();
final Path geoIpConfigDir = configDir.resolve("ingest-geoip");
@@ -340,6 +341,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
assertNotNull(databaseNodeService.getDatabase("GeoLite2-ASN.mmdb").databaseReader.get());
}
+@SuppressWarnings("HiddenField")
public void testLoadingCustomDatabase() throws IOException {
final Path configDir = createTempDir();
final Path geoIpConfigDir = configDir.resolve("ingest-geoip");

@@ -49,7 +49,7 @@ public class ExpressionFieldScriptTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
-lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, lookup) -> fieldData);
+lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData);
}
private FieldScript.LeafFactory compile(String expression) {

@@ -50,7 +50,7 @@ public class ExpressionNumberSortScriptTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
-lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, lookup) -> fieldData);
+lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData);
}
private NumberSortScript.LeafFactory compile(String expression) {

@@ -49,7 +49,7 @@ public class ExpressionTermsSetQueryTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
-lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, lookup) -> fieldData);
+lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup) -> fieldData);
}
private TermsSetQueryScript.LeafFactory compile(String expression) {
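The three expression-script test fixes above are the lambda variant of the same problem: the second lambda parameter reused the name of the lookup field being assigned, so it shadowed the field inside the lambda body. A self-contained sketch with hypothetical names (the real SearchLookup signature is not reproduced here):

import java.util.function.BiFunction;

class LambdaShadowExample {
    private BiFunction<String, Integer, String> lookup;

    void setUp() {
        // Before: (key, lookup) -> key compiled fine, but the parameter
        // named 'lookup' hid the field. Renaming the unused parameter,
        // here to '_lookup', removes the shadow without changing behaviour.
        lookup = (key, _lookup) -> key;
    }
}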

@@ -102,23 +102,23 @@ public class CircleBuilder extends ShapeBuilder<Circle, org.elasticsearch.geomet
/**
* Set the radius of the circle
-* @param radius value of the circles radius
-* @param unit unit name of the radius value (see {@link DistanceUnit})
+* @param radiusValue value of the circles radius
+* @param unitValue unit name of the radius value (see {@link DistanceUnit})
* @return this
*/
-public CircleBuilder radius(double radius, String unit) {
-return radius(radius, DistanceUnit.fromString(unit));
+public CircleBuilder radius(double radiusValue, String unitValue) {
+return radius(radiusValue, DistanceUnit.fromString(unitValue));
}
/**
* Set the radius of the circle
-* @param radius value of the circles radius
-* @param unit unit of the radius value (see {@link DistanceUnit})
+* @param radiusValue value of the circles radius
+* @param unitValue unit of the radius value (see {@link DistanceUnit})
* @return this
*/
-public CircleBuilder radius(double radius, DistanceUnit unit) {
-this.unit = unit;
-this.radius = radius;
+public CircleBuilder radius(double radiusValue, DistanceUnit unitValue) {
+this.unit = unitValue;
+this.radius = radiusValue;
return this;
}

@@ -43,8 +43,8 @@ public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryColle
* Read from a stream.
*/
public GeometryCollectionBuilder(StreamInput in) throws IOException {
-int shapes = in.readVInt();
-for (int i = 0; i < shapes; i++) {
+int shapesValue = in.readVInt();
+for (int i = 0; i < shapesValue; i++) {
shape(in.readNamedWriteable(ShapeBuilder.class));
}
}
@@ -159,15 +159,14 @@ public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryColle
@Override
public Shape buildS4J() {
-List<Shape> shapes = new ArrayList<>(this.shapes.size());
+List<Shape> shapesList = new ArrayList<>(this.shapes.size());
for (ShapeBuilder<?, ?, ?> shape : this.shapes) {
-shapes.add(shape.buildS4J());
+shapesList.add(shape.buildS4J());
}
-if (shapes.size() == 1) return shapes.get(0);
-else return new XShapeCollection<>(shapes, SPATIAL_CONTEXT);
+if (shapesList.size() == 1) return shapesList.get(0);
+else return new XShapeCollection<>(shapesList, SPATIAL_CONTEXT);
// note: ShapeCollection is probably faster than a Multi* geom.
}
@@ -176,13 +175,13 @@ public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryColle
if (this.shapes.isEmpty()) {
return GeometryCollection.EMPTY;
}
-List<Geometry> shapes = new ArrayList<>(this.shapes.size());
+List<Geometry> shapesList = new ArrayList<>(this.shapes.size());
for (ShapeBuilder<?, ?, ?> shape : this.shapes) {
-shapes.add(shape.buildGeometry());
+shapesList.add(shape.buildGeometry());
}
-return new GeometryCollection<>(shapes);
+return new GeometryCollection<>(shapesList);
}
@Override

@@ -41,6 +41,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
* Methods to wrap polygons at the dateline and building shapes from the data held by the
* builder.
*/
+@SuppressWarnings("HiddenField")
public class PolygonBuilder extends ShapeBuilder<JtsGeometry, org.elasticsearch.geometry.Geometry, PolygonBuilder> {
public static final GeoShapeType TYPE = GeoShapeType.POLYGON;
@@ -82,8 +83,8 @@ public class PolygonBuilder extends ShapeBuilder<JtsGeometry, org.elasticsearch.
public PolygonBuilder(StreamInput in) throws IOException {
shell = new LineStringBuilder(in);
orientation = Orientation.readFrom(in);
-int holes = in.readVInt();
-for (int i = 0; i < holes; i++) {
+int holesValue = in.readVInt();
+for (int i = 0; i < holesValue; i++) {
hole(new LineStringBuilder(in));
}
}

@@ -147,21 +147,21 @@ public abstract class ShapeBuilder<T extends Shape, G extends org.elasticsearch.
/**
* Add a array of coordinates to the collection
*
-* @param coordinates array of {@link Coordinate}s to add
+* @param coordinatesToAdd array of {@link Coordinate}s to add
* @return this
*/
-public E coordinates(Coordinate... coordinates) {
-return this.coordinates(Arrays.asList(coordinates));
+public E coordinates(Coordinate... coordinatesToAdd) {
+return this.coordinates(Arrays.asList(coordinatesToAdd));
}
/**
* Add a collection of coordinates to the collection
*
-* @param coordinates array of {@link Coordinate}s to add
+* @param coordinatesToAdd array of {@link Coordinate}s to add
* @return this
*/
-public E coordinates(Collection<? extends Coordinate> coordinates) {
-this.coordinates.addAll(coordinates);
+public E coordinates(Collection<? extends Coordinate> coordinatesToAdd) {
+this.coordinates.addAll(coordinatesToAdd);
return thisRef();
}

@@ -446,11 +446,11 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
public Query geoShapeQuery(
Geometry shape,
String fieldName,
-SpatialStrategy strategy,
+SpatialStrategy spatialStrategy,
ShapeRelation relation,
SearchExecutionContext context
) {
-return queryProcessor.geoShapeQuery(shape, fieldName, strategy, relation, context);
+return queryProcessor.geoShapeQuery(shape, fieldName, spatialStrategy, relation, context);
}
@Override
@@ -517,8 +517,8 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
return this.defaultPrefixTreeStrategy;
}
-public PrefixTreeStrategy resolvePrefixTreeStrategy(SpatialStrategy strategy) {
-return resolvePrefixTreeStrategy(strategy.getStrategyName());
+public PrefixTreeStrategy resolvePrefixTreeStrategy(SpatialStrategy spatialStrategy) {
+return resolvePrefixTreeStrategy(spatialStrategy.getStrategyName());
}
public PrefixTreeStrategy resolvePrefixTreeStrategy(String strategyName) {

@@ -292,8 +292,8 @@ public class ScaledFloatFieldMapper extends FieldMapper {
doubleValue = objectToDouble(value);
}
-double scalingFactor = getScalingFactor();
-return Math.round(doubleValue * scalingFactor) / scalingFactor;
+double factor = getScalingFactor();
+return Math.round(doubleValue * factor) / factor;
}
};
}

@@ -370,16 +370,16 @@ public final class SourceConfirmedTextQuery extends Query {
MemoryIndex index = new MemoryIndex();
index.setSimilarity(FREQ_SIMILARITY);
List<Object> values = valueFetcher.apply(docID());
-float freq = 0;
+float frequency = 0;
for (Object value : values) {
if (value == null) {
continue;
}
index.addField(field, value.toString(), indexAnalyzer);
-freq += index.search(query);
+frequency += index.search(query);
index.reset();
}
-return freq;
+return frequency;
}
}

@@ -76,7 +76,7 @@ public final class SourceIntervalsSource extends IntervalsSource {
if (scorer == null) {
return null;
}
-final DocIdSetIterator approximation = scorer.iterator();
+final DocIdSetIterator approximationIter = scorer.iterator();
final CheckedIntFunction<List<Object>, IOException> valueFetcher = valueFetcherProvider.apply(ctx);
return new IntervalIterator() {
@@ -85,27 +85,27 @@ public final class SourceIntervalsSource extends IntervalsSource {
@Override
public int docID() {
-return approximation.docID();
+return approximationIter.docID();
}
@Override
public long cost() {
-return approximation.cost();
+return approximationIter.cost();
}
@Override
public int nextDoc() throws IOException {
-return doNext(approximation.nextDoc());
+return doNext(approximationIter.nextDoc());
}
@Override
public int advance(int target) throws IOException {
-return doNext(approximation.advance(target));
+return doNext(approximationIter.advance(target));
}
private int doNext(int doc) throws IOException {
while (doc != NO_MORE_DOCS && setIterator(doc) == false) {
-doc = approximation.nextDoc();
+doc = approximationIter.nextDoc();
}
return doc;
}

@@ -130,18 +130,18 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
* Defines the minimum number of children that are required to match for the parent to be considered a match and
* the maximum number of children that are required to match for the parent to be considered a match.
*/
-public HasChildQueryBuilder minMaxChildren(int minChildren, int maxChildren) {
-if (minChildren <= 0) {
+public HasChildQueryBuilder minMaxChildren(int min, int max) {
+if (min <= 0) {
throw new IllegalArgumentException("[" + NAME + "] requires positive 'min_children' field");
}
-if (maxChildren <= 0) {
+if (max <= 0) {
throw new IllegalArgumentException("[" + NAME + "] requires positive 'max_children' field");
}
-if (maxChildren < minChildren) {
+if (max < min) {
throw new IllegalArgumentException("[" + NAME + "] 'max_children' is less than 'min_children'");
}
-this.minChildren = minChildren;
-this.maxChildren = maxChildren;
+this.minChildren = min;
+this.maxChildren = max;
return this;
}

@@ -443,7 +443,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
if (indexedDocumentVersion != null) {
getRequest.version(indexedDocumentVersion);
}
-SetOnce<BytesReference> documentSupplier = new SetOnce<>();
+SetOnce<BytesReference> docSupplier = new SetOnce<>();
queryRewriteContext.registerAsyncAction((client, listener) -> {
client.get(getRequest, ActionListener.wrap(getResponse -> {
if (getResponse.isExists() == false) {
@@ -458,12 +458,12 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
"indexed document [" + indexedDocumentIndex + "/" + indexedDocumentId + "] source disabled"
);
}
-documentSupplier.set(getResponse.getSourceAsBytesRef());
+docSupplier.set(getResponse.getSourceAsBytesRef());
listener.onResponse(null);
}, listener::onFailure));
});
-PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, documentSupplier::get);
+PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, docSupplier::get);
if (name != null) {
rewritten.setName(name);
}
@@ -529,13 +529,13 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
}
PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType;
-String name = this.name != null ? this.name : pft.name();
+String queryName = this.name != null ? this.name : pft.name();
SearchExecutionContext percolateShardContext = wrap(context);
PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText);
;
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext);
-return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
+return pft.percolateQuery(queryName, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
}
public String getField() {

@@ -362,7 +362,7 @@ public class PercolatorFieldMapper extends FieldMapper {
@Override
public void parse(DocumentParserContext context) throws IOException {
-SearchExecutionContext searchExecutionContext = this.searchExecutionContext.get();
+SearchExecutionContext executionContext = this.searchExecutionContext.get();
if (context.doc().getField(queryBuilderField.name()) != null) {
// If a percolator query has been defined in an array object then multiple percolator queries
// could be provided. In order to prevent this we fail if we try to parse more than one query
@@ -370,21 +370,21 @@ public class PercolatorFieldMapper extends FieldMapper {
throw new IllegalArgumentException("a document can only contain one percolator query");
}
-configureContext(searchExecutionContext, isMapUnmappedFieldAsText());
+configureContext(executionContext, isMapUnmappedFieldAsText());
XContentParser parser = context.parser();
QueryBuilder queryBuilder = parseQueryBuilder(parser, parser.getTokenLocation());
verifyQuery(queryBuilder);
// Fetching of terms, shapes and indexed scripts happen during this rewrite:
PlainActionFuture<QueryBuilder> future = new PlainActionFuture<>();
-Rewriteable.rewriteAndFetch(queryBuilder, searchExecutionContext, future);
+Rewriteable.rewriteAndFetch(queryBuilder, executionContext, future);
queryBuilder = future.actionGet();
Version indexVersion = context.indexSettings().getIndexVersionCreated();
createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context);
-QueryBuilder queryBuilderForProcessing = queryBuilder.rewrite(new SearchExecutionContext(searchExecutionContext));
-Query query = queryBuilderForProcessing.toQuery(searchExecutionContext);
+QueryBuilder queryBuilderForProcessing = queryBuilder.rewrite(new SearchExecutionContext(executionContext));
+Query query = queryBuilderForProcessing.toQuery(executionContext);
processQuery(query, context);
}

@@ -159,17 +159,17 @@ final class QueryAnalyzer {
this.verified = false;
return QueryVisitor.EMPTY_VISITOR;
}
-int minimumShouldMatch = 0;
+int minimumShouldMatchValue = 0;
if (parent instanceof BooleanQuery) {
BooleanQuery bq = (BooleanQuery) parent;
if (bq.getMinimumNumberShouldMatch() == 0
&& bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) {
return QueryVisitor.EMPTY_VISITOR;
}
-minimumShouldMatch = bq.getMinimumNumberShouldMatch();
+minimumShouldMatchValue = bq.getMinimumNumberShouldMatch();
}
ResultBuilder child = new ResultBuilder(false);
-child.minimumShouldMatch = minimumShouldMatch;
+child.minimumShouldMatch = minimumShouldMatchValue;
children.add(child);
return child;
}
@@ -188,11 +188,11 @@ final class QueryAnalyzer {
}
@Override
-public void consumeTerms(Query query, Term... terms) {
-boolean verified = isVerified(query);
-Set<QueryExtraction> qe = Arrays.stream(terms).map(QueryExtraction::new).collect(Collectors.toUnmodifiableSet());
+public void consumeTerms(Query query, Term... termsToConsume) {
+boolean isVerified = isVerified(query);
+Set<QueryExtraction> qe = Arrays.stream(termsToConsume).map(QueryExtraction::new).collect(Collectors.toUnmodifiableSet());
if (qe.size() > 0) {
-this.terms.add(new Result(verified, qe, conjunction ? qe.size() : 1));
+this.terms.add(new Result(isVerified, qe, conjunction ? qe.size() : 1));
}
}

@@ -1126,12 +1126,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
assertEquals(0, topDocs.scoreDocs[0].doc);
}
-private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException {
+private void duelRun(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher)
+throws IOException {
boolean requireScore = randomBoolean();
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query percolateQuery = fieldType.percolateQuery(
"_name",
-queryStore,
+percolateQueryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,
@@ -1140,7 +1141,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery);
TopDocs topDocs = shardSearcher.search(query, 100);
-Query controlQuery = new ControlQuery(memoryIndex, queryStore);
+Query controlQuery = new ControlQuery(memoryIndex, percolateQueryStore);
controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery);
TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100);
@@ -1212,12 +1213,12 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
queries.add(query);
}
-private TopDocs executeQuery(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher)
+private TopDocs executeQuery(PercolateQuery.QueryStore percolateQueryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher)
throws IOException {
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
Query percolateQuery = fieldType.percolateQuery(
"_name",
-queryStore,
+percolateQueryStore,
Collections.singletonList(new BytesArray("{}")),
percolateSearcher,
false,

@@ -217,15 +217,15 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(bq.build(), documentParserContext);
LuceneDocument document = documentParserContext.doc();
-PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
-List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
+PercolatorFieldMapper.PercolatorFieldType percolatorFieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
+List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.queryTermsField.name())));
fields.sort(Comparator.comparing(IndexableField::binaryValue));
assertThat(fields.size(), equalTo(2));
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1"));
assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2"));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.minimumShouldMatchField.name())));
assertThat(fields.size(), equalTo(1));
assertThat(fields.get(0).numericValue(), equalTo(1L));
@@ -238,14 +238,14 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(bq.build(), documentParserContext);
document = documentParserContext.doc();
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name())));
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.queryTermsField.name())));
fields.sort(Comparator.comparing(IndexableField::binaryValue));
assertThat(fields.size(), equalTo(2));
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1"));
assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2"));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.minimumShouldMatchField.name())));
assertThat(fields.size(), equalTo(1));
assertThat(fields.get(0).numericValue(), equalTo(2L));
}
@@ -265,9 +265,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(bq.build(), documentParserContext);
LuceneDocument document = documentParserContext.doc();
-PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
-List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name())));
+PercolatorFieldMapper.PercolatorFieldType percolatorFieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
+List<IndexableField> fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.rangeField.name())));
fields.sort(Comparator.comparing(IndexableField::binaryValue));
assertThat(fields.size(), equalTo(2));
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10));
@@ -275,7 +275,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 12), equalTo(15));
assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 28), equalTo(20));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.minimumShouldMatchField.name())));
assertThat(fields.size(), equalTo(1));
assertThat(fields.get(0).numericValue(), equalTo(1L));
@@ -289,8 +289,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(bq.build(), documentParserContext);
document = documentParserContext.doc();
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name())));
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.rangeField.name())));
fields.sort(Comparator.comparing(IndexableField::binaryValue));
assertThat(fields.size(), equalTo(2));
assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10));
@@ -298,7 +298,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 8), equalTo(15L));
assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 24), equalTo(20L));
-fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name())));
+fields = new ArrayList<>(Arrays.asList(document.getFields(percolatorFieldType.minimumShouldMatchField.name())));
assertThat(fields.size(), equalTo(1));
assertThat(fields.get(0).numericValue(), equalTo(2L));
}
@@ -312,9 +312,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(query, documentParserContext);
LuceneDocument document = documentParserContext.doc();
-PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
+PercolatorFieldMapper.PercolatorFieldType percolatorFieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getFields().size(), equalTo(1));
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_FAILED));
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_FAILED));
}
public void testExtractTermsAndRanges_partial() throws Exception {
@@ -326,10 +326,10 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
fieldMapper.processQuery(phraseQuery, documentParserContext);
LuceneDocument document = documentParserContext.doc();
-PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
+PercolatorFieldMapper.PercolatorFieldType percolatorFieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
assertThat(document.getFields().size(), equalTo(3));
assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term"));
-assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
+assertThat(document.getField(percolatorFieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
}
public void testExtractTermsAndRanges() throws Exception {
@@ -613,8 +613,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testAllowNoAdditionalSettings() throws Exception {
addQueryFieldMappings();
-IndexService indexService = createIndex("test1", Settings.EMPTY);
-MapperService mapperService = indexService.mapperService();
+IndexService indexServiceWithoutSettings = createIndex("test1", Settings.EMPTY);
String percolatorMapper = Strings.toString(
XContentFactory.jsonBuilder()
@@ -631,7 +630,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
);
MapperParsingException e = expectThrows(
MapperParsingException.class,
-() -> mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)
+() -> indexServiceWithoutSettings.mapperService()
+.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)
);
assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
}
@@ -948,11 +948,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}
-String fieldName = randomAlphaOfLength(5);
-byte[] result = PercolatorFieldMapper.encodeRange(fieldName, encodedFrom, encodedTo);
+String randomFieldName = randomAlphaOfLength(5);
+byte[] result = PercolatorFieldMapper.encodeRange(randomFieldName, encodedFrom, encodedTo);
assertEquals(32, result.length);
-BytesRef fieldAsBytesRef = new BytesRef(fieldName);
+BytesRef fieldAsBytesRef = new BytesRef(randomFieldName);
MurmurHash3.Hash128 hash = new MurmurHash3.Hash128();
MurmurHash3.hash128(fieldAsBytesRef.bytes, fieldAsBytesRef.offset, fieldAsBytesRef.length, 0, hash);

@@ -234,8 +234,8 @@ public class RatedRequest implements Writeable, ToXContentObject {
return Collections.unmodifiableList(summaryFields);
}
-public void addSummaryFields(List<String> summaryFields) {
-this.summaryFields.addAll(Objects.requireNonNull(summaryFields, "no summary fields supplied"));
+public void addSummaryFields(List<String> summaryFieldsToAdd) {
+this.summaryFields.addAll(Objects.requireNonNull(summaryFieldsToAdd, "no summary fields supplied"));
}
private static final ParseField ID_FIELD = new ParseField("id");
@@ -282,7 +282,7 @@ public class RatedRequest implements Writeable, ToXContentObject {
}
@Override
-public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+public XContentBuilder toXContent(XContentBuilder builder, Params xContentParams) throws IOException {
builder.startObject();
builder.field(ID_FIELD.getPreferredName(), this.id);
if (evaluationRequest != null) {
@@ -290,7 +290,7 @@ public class RatedRequest implements Writeable, ToXContentObject {
}
builder.startArray(RATINGS_FIELD.getPreferredName());
for (RatedDocument doc : this.ratedDocs) {
-doc.toXContent(builder, params);
+doc.toXContent(builder, xContentParams);
}
builder.endArray();
if (this.templateId != null) {

@@ -311,10 +311,10 @@ public abstract class AbstractAsyncBulkByScrollAction<
/**
* Process a scroll response.
* @param lastBatchStartTimeNS the time when the last batch started. Used to calculate the throttling delay.
-* @param lastBatchSize the size of the last batch. Used to calculate the throttling delay.
+* @param lastBatchSizeToUse the size of the last batch. Used to calculate the throttling delay.
* @param asyncResponse the response to process from ScrollableHitSource
*/
-void onScrollResponse(long lastBatchStartTimeNS, int lastBatchSize, ScrollConsumableHitsResponse asyncResponse) {
+void onScrollResponse(long lastBatchStartTimeNS, int lastBatchSizeToUse, ScrollConsumableHitsResponse asyncResponse) {
ScrollableHitSource.Response response = asyncResponse.response();
logger.debug("[{}]: got scroll response with [{}] hits", task.getId(), asyncResponse.remainingHits());
if (task.isCancelled()) {
@@ -350,7 +350,7 @@ public abstract class AbstractAsyncBulkByScrollAction<
}
};
prepareBulkRequestRunnable = (AbstractRunnable) threadPool.getThreadContext().preserveContext(prepareBulkRequestRunnable);
-worker.delayPrepareBulkRequest(threadPool, lastBatchStartTimeNS, lastBatchSize, prepareBulkRequestRunnable);
+worker.delayPrepareBulkRequest(threadPool, lastBatchStartTimeNS, lastBatchSizeToUse, prepareBulkRequestRunnable);
}
/**

@@ -954,10 +954,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
-ActionListener<Response> listener
+ActionListener<Response> responseActionListener
) {
if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) {
-listener.onFailure(
+responseActionListener.onFailure(
new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())
);
return;
@@ -970,15 +970,15 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
}
if (request instanceof RefreshRequest) {
lastRefreshRequest.set((RefreshRequest) request);
-listener.onResponse(null);
+responseActionListener.onResponse(null);
return;
}
if (request instanceof SearchRequest) {
if (searchAttempts.incrementAndGet() <= searchesToReject) {
-listener.onFailure(wrappedRejectedException());
+responseActionListener.onFailure(wrappedRejectedException());
return;
}
-lastSearch.set(new RequestAndListener<>((SearchRequest) request, (ActionListener<SearchResponse>) listener));
+lastSearch.set(new RequestAndListener<>((SearchRequest) request, (ActionListener<SearchResponse>) responseActionListener));
return;
}
if (request instanceof SearchScrollRequest) {
@@ -986,16 +986,16 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
boolean newRequest = usedScolls.add(scroll);
assertTrue("We can't reuse scroll requests", newRequest);
if (scrollAttempts.incrementAndGet() <= scrollsToReject) {
-listener.onFailure(wrappedRejectedException());
+responseActionListener.onFailure(wrappedRejectedException());
return;
}
-lastScroll.set(new RequestAndListener<>(scroll, (ActionListener<SearchResponse>) listener));
+lastScroll.set(new RequestAndListener<>(scroll, (ActionListener<SearchResponse>) responseActionListener));
return;
}
if (request instanceof ClearScrollRequest) {
ClearScrollRequest clearScroll = (ClearScrollRequest) request;
scrollsCleared.addAll(clearScroll.getScrollIds());
-listener.onResponse((Response) new ClearScrollResponse(true, clearScroll.getScrollIds().size()));
+responseActionListener.onResponse((Response) new ClearScrollResponse(true, clearScroll.getScrollIds().size()));
return;
}
if (request instanceof BulkRequest) {
@@ -1054,10 +1054,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
responses[i] = BulkItemResponse.success(i, item.opType(), response);
}
}
-listener.onResponse((Response) new BulkResponse(responses, 1));
+responseActionListener.onResponse((Response) new BulkResponse(responses, 1));
return;
}
-super.doExecute(action, request, listener);
+super.doExecute(action, request, responseActionListener);
}
private Exception wrappedRejectedException() {

@@ -19,6 +19,7 @@ import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
+@SuppressWarnings("HiddenField")
public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher<BulkByScrollResponse> {
private Matcher<Long> createdMatcher = equalTo(0L);

@@ -203,18 +203,18 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
this.listener = listener;
}
-public void respond(ActionType<Response> action, Function<Request, Response> response) {
-assertEquals(action, this.action);
+public void respond(ActionType<Response> actionType, Function<Request, Response> response) {
+assertEquals(actionType, this.action);
listener.onResponse(response.apply(request));
}
-public void fail(ActionType<Response> action, Exception response) {
-assertEquals(action, this.action);
+public void fail(ActionType<Response> actionType, Exception response) {
+assertEquals(actionType, this.action);
listener.onFailure(response);
}
-public void validateRequest(ActionType<Response> action, Consumer<? super Request> validator) {
-assertEquals(action, this.action);
+public void validateRequest(ActionType<Response> actionType, Consumer<? super Request> validator) {
+assertEquals(actionType, this.action);
validator.accept(request);
}
}
@@ -242,12 +242,12 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
ActionType<Response> action,
Function<Request, Response> response
) {
-ExecuteRequest<?, ?> executeRequest;
+ExecuteRequest<?, ?> executeRequestCopy;
synchronized (this) {
-executeRequest = this.executeRequest;
+executeRequestCopy = this.executeRequest;
this.executeRequest = null;
}
-((ExecuteRequest<Request, Response>) executeRequest).respond(action, response);
+((ExecuteRequest<Request, Response>) executeRequestCopy).respond(action, response);
}
public <Response extends ActionResponse> void respond(ActionType<Response> action, Response response) {
@@ -256,12 +256,12 @@ public class ClientScrollableHitSourceTests extends ESTestCase {
@SuppressWarnings("unchecked")
public <Response extends ActionResponse> void fail(ActionType<Response> action, Exception response) {
-ExecuteRequest<?, ?> executeRequest;
+ExecuteRequest<?, ?> executeRequestCopy;
synchronized (this) {
-executeRequest = this.executeRequest;
+executeRequestCopy = this.executeRequest;
this.executeRequest = null;
}
-((ExecuteRequest<?, Response>) executeRequest).fail(action, response);
+((ExecuteRequest<?, Response>) executeRequestCopy).fail(action, response);
}
@SuppressWarnings("unchecked")

@@ -97,11 +97,11 @@ public class URLBlobStore implements BlobStore {
}
@Override
-public BlobContainer blobContainer(BlobPath path) {
+public BlobContainer blobContainer(BlobPath blobPath) {
try {
-return blobContainerFactory.apply(path);
+return blobContainerFactory.apply(blobPath);
} catch (MalformedURLException ex) {
-throw new BlobStoreException("malformed URL " + path, ex);
+throw new BlobStoreException("malformed URL " + blobPath, ex);
}
}
@@ -113,11 +113,11 @@ public class URLBlobStore implements BlobStore {
/**
* Builds URL using base URL and specified path
*
-* @param path relative path
+* @param relativePath relative path
* @return Base URL + path
*/
-private URL buildPath(BlobPath path) throws MalformedURLException {
-List<String> paths = path.parts();
+private URL buildPath(BlobPath relativePath) throws MalformedURLException {
+List<String> paths = relativePath.parts();
if (paths.isEmpty()) {
return path();
}

@@ -139,30 +139,30 @@ public class URLRepository extends BlobStoreRepository {
/**
* Makes sure that the url is white listed or if it points to the local file system it matches one on of the root path in path.repo
*/
-private URL checkURL(URL url) {
-String protocol = url.getProtocol();
+private URL checkURL(URL urlToCheck) {
+String protocol = urlToCheck.getProtocol();
if (protocol == null) {
-throw new RepositoryException(getMetadata().name(), "unknown url protocol from URL [" + url + "]");
+throw new RepositoryException(getMetadata().name(), "unknown url protocol from URL [" + urlToCheck + "]");
}
for (String supportedProtocol : supportedProtocols) {
if (supportedProtocol.equals(protocol)) {
try {
-if (URIPattern.match(urlWhiteList, url.toURI())) {
+if (URIPattern.match(urlWhiteList, urlToCheck.toURI())) {
// URL matches white list - no additional processing is needed
-return url;
+return urlToCheck;
}
} catch (URISyntaxException ex) {
-logger.warn("cannot parse the specified url [{}]", url);
-throw new RepositoryException(getMetadata().name(), "cannot parse the specified url [" + url + "]");
+logger.warn("cannot parse the specified url [{}]", urlToCheck);
+throw new RepositoryException(getMetadata().name(), "cannot parse the specified url [" + urlToCheck + "]");
}
// We didn't match white list - try to resolve against path.repo
-URL normalizedUrl = environment.resolveRepoURL(url);
+URL normalizedUrl = environment.resolveRepoURL(urlToCheck);
if (normalizedUrl == null) {
String logMessage = "The specified url [{}] doesn't start with any repository paths specified by the "
+ "path.repo setting or by {} setting: [{}] ";
-logger.warn(logMessage, url, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles());
+logger.warn(logMessage, urlToCheck, ALLOWED_URLS_SETTING.getKey(), environment.repoFiles());
String exceptionMessage = "file url ["
-+ url
++ urlToCheck
+ "] doesn't match any of the locations specified by path.repo or "
+ ALLOWED_URLS_SETTING.getKey();
throw new RepositoryException(getMetadata().name(), exceptionMessage);
@@ -170,7 +170,7 @@ public class URLRepository extends BlobStoreRepository {
return normalizedUrl;
}
}
-throw new RepositoryException(getMetadata().name(), "unsupported url protocol [" + protocol + "] from URL [" + url + "]");
+throw new RepositoryException(getMetadata().name(), "unsupported url protocol [" + protocol + "] from URL [" + urlToCheck + "]");
}
@Override

@@ -218,8 +218,8 @@ public class Netty4HttpRequest implements HttpRequest {
}
@Override
-public Netty4HttpResponse createResponse(RestStatus status, BytesReference content) {
-return new Netty4HttpResponse(request.headers(), request.protocolVersion(), status, content);
+public Netty4HttpResponse createResponse(RestStatus status, BytesReference contentRef) {
+return new Netty4HttpResponse(request.headers(), request.protocolVersion(), status, contentRef);
}
@Override

@@ -87,12 +87,12 @@ public class CopyBytesSocketChannel extends Netty4NioSocketChannel {
} else {
// Zero length buffers are not added to nioBuffers by ChannelOutboundBuffer, so there is no need
// to check if the total size of all the buffers is non-zero.
-ByteBuffer ioBuffer = getIoBuffer();
-copyBytes(nioBuffers, nioBufferCnt, ioBuffer);
-ioBuffer.flip();
+ByteBuffer buffer = getIoBuffer();
+copyBytes(nioBuffers, nioBufferCnt, buffer);
+buffer.flip();
-int attemptedBytes = ioBuffer.remaining();
-final int localWrittenBytes = writeToSocketChannel(javaChannel(), ioBuffer);
+int attemptedBytes = buffer.remaining();
+final int localWrittenBytes = writeToSocketChannel(javaChannel(), buffer);
if (localWrittenBytes <= 0) {
incompleteWrite(true);
return;
@@ -112,29 +112,29 @@ public class CopyBytesSocketChannel extends Netty4NioSocketChannel {
final RecvByteBufAllocator.Handle allocHandle = unsafe().recvBufAllocHandle();
int writeableBytes = Math.min(byteBuf.writableBytes(), MAX_BYTES_PER_WRITE);
allocHandle.attemptedBytesRead(writeableBytes);
-ByteBuffer ioBuffer = getIoBuffer().limit(writeableBytes);
-int bytesRead = readFromSocketChannel(javaChannel(), ioBuffer);
-ioBuffer.flip();
+ByteBuffer limit = getIoBuffer().limit(writeableBytes);
+int bytesRead = readFromSocketChannel(javaChannel(), limit);
+limit.flip();
if (bytesRead > 0) {
-byteBuf.writeBytes(ioBuffer);
+byteBuf.writeBytes(limit);
}
return bytesRead;
}
// Protected so that tests can verify behavior and simulate partial writes
-protected int writeToSocketChannel(SocketChannel socketChannel, ByteBuffer ioBuffer) throws IOException {
-return socketChannel.write(ioBuffer);
+protected int writeToSocketChannel(SocketChannel socketChannel, ByteBuffer buffer) throws IOException {
+return socketChannel.write(buffer);
}
// Protected so that tests can verify behavior
-protected int readFromSocketChannel(SocketChannel socketChannel, ByteBuffer ioBuffer) throws IOException {
-return socketChannel.read(ioBuffer);
+protected int readFromSocketChannel(SocketChannel socketChannel, ByteBuffer buffer) throws IOException {
+return socketChannel.read(buffer);
}
private static ByteBuffer getIoBuffer() {
-ByteBuffer ioBuffer = CopyBytesSocketChannel.ioBuffer.get();
-ioBuffer.clear();
-return ioBuffer;
+ByteBuffer buffer = CopyBytesSocketChannel.ioBuffer.get();
+buffer.clear();
+return buffer;
}
private void adjustMaxBytesPerGatheringWrite(int attempted, int written, int oldMaxBytesPerGatheringWrite) {

@@ -116,10 +116,10 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin {
}
private SharedGroupFactory getSharedGroupFactory(Settings settings) {
-SharedGroupFactory groupFactory = this.groupFactory.get();
-if (groupFactory != null) {
-assert groupFactory.getSettings().equals(settings) : "Different settings than originally provided";
-return groupFactory;
+SharedGroupFactory factory = this.groupFactory.get();
+if (factory != null) {
+assert factory.getSettings().equals(settings) : "Different settings than originally provided";
+return factory;
} else {
this.groupFactory.set(new SharedGroupFactory(settings));
return this.groupFactory.get();

@@ -154,9 +154,9 @@ public class Netty4Transport extends TcpTransport {
}
}
-private Bootstrap createClientBootstrap(SharedGroupFactory.SharedGroup sharedGroup) {
+private Bootstrap createClientBootstrap(SharedGroupFactory.SharedGroup sharedGroupForBootstrap) {
final Bootstrap bootstrap = new Bootstrap();
-bootstrap.group(sharedGroup.getLowLevelGroup());
+bootstrap.group(sharedGroupForBootstrap.getLowLevelGroup());
// NettyAllocator will return the channel type designed to work with the configured allocator
assert Netty4NioSocketChannel.class.isAssignableFrom(NettyAllocator.getChannelType());
@@ -205,7 +205,7 @@ public class Netty4Transport extends TcpTransport {
return bootstrap;
}
-private void createServerBootstrap(ProfileSettings profileSettings, SharedGroupFactory.SharedGroup sharedGroup) {
+private void createServerBootstrap(ProfileSettings profileSettings, SharedGroupFactory.SharedGroup sharedGroupForServerBootstrap) {
String name = profileSettings.profileName;
if (logger.isDebugEnabled()) {
logger.debug(
@@ -222,7 +222,7 @@ public class Netty4Transport extends TcpTransport {
final ServerBootstrap serverBootstrap = new ServerBootstrap();
-serverBootstrap.group(sharedGroup.getLowLevelGroup());
+serverBootstrap.group(sharedGroupForServerBootstrap.getLowLevelGroup());
// NettyAllocator will return the channel type designed to work with the configuredAllocator
serverBootstrap.channel(NettyAllocator.getServerChannelType());

@@ -150,26 +150,26 @@ public class CopyBytesSocketChannelTests extends ESTestCase {
}
@Override
-protected int writeToSocketChannel(SocketChannel socketChannel, ByteBuffer ioBuffer) throws IOException {
-assertTrue("IO Buffer must be a direct byte buffer", ioBuffer.isDirect());
-int remaining = ioBuffer.remaining();
-int originalLimit = ioBuffer.limit();
+protected int writeToSocketChannel(SocketChannel socketChannel, ByteBuffer buffer) throws IOException {
+assertTrue("IO Buffer must be a direct byte buffer", buffer.isDirect());
+int remaining = buffer.remaining();
+int originalLimit = buffer.limit();
// If greater than a KB, possibly invoke a partial write.
if (remaining > 1024) {
if (randomBoolean()) {
int bytes = randomIntBetween(remaining / 2, remaining);
-ioBuffer.limit(ioBuffer.position() + bytes);
+buffer.limit(buffer.position() + bytes);
}
}
-int written = socketChannel.write(ioBuffer);
-ioBuffer.limit(originalLimit);
+int written = socketChannel.write(buffer);
+buffer.limit(originalLimit);
return written;
}
@Override
-protected int readFromSocketChannel(SocketChannel socketChannel, ByteBuffer ioBuffer) throws IOException {
-assertTrue("IO Buffer must be a direct byte buffer", ioBuffer.isDirect());
-return socketChannel.read(ioBuffer);
+protected int readFromSocketChannel(SocketChannel socketChannel, ByteBuffer buffer) throws IOException {
+assertTrue("IO Buffer must be a direct byte buffer", buffer.isDirect());
+return socketChannel.read(buffer);
}
}
}