Mirror of https://github.com/elastic/elasticsearch.git (synced 2025-04-24 23:27:25 -04:00)
Parent: 8a1db8c6c3
Commit: 1eda6ac74b
263 changed files with 4724 additions and 6555 deletions
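Every hunk below applies the same mechanical refactoring: test code that built a search request through client().prepareSearch(index) now calls a prepareSearch(index) helper that the test class can use directly. As a rough illustration only (the class and member names in this sketch are hypothetical placeholders, not the actual Elasticsearch test-framework code), such a helper can be a one-line delegation to the node client:

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.client.internal.Client;

    // Hypothetical sketch of the delegation pattern; the real helper lives in the
    // shared integration-test base class, not in a class with this name.
    public abstract class SearchHelperSketch {

        // Accessor for the test cluster's node client (provided by the real base class).
        protected abstract Client client();

        // Lets subclasses write prepareSearch("idx") instead of client().prepareSearch("idx").
        protected SearchRequestBuilder prepareSearch(String... indices) {
            return client().prepareSearch(indices);
        }
    }

Call sites then shrink from client().prepareSearch("test") to prepareSearch("test"), which is the transformation repeated in every file below; the one exception is TokenCountFieldMapperIntegrationIT, whose private prepareSearch() helper is renamed to prepareTokenCountFieldMapperSearch() so it no longer shadows the inherited one.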
@@ -98,9 +98,9 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     }
 
     public void testSimple() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))))
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))
+        ).get();
 
         assertNoFailures(response);
@@ -130,9 +130,9 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     }
 
     public void testCustomSeparator() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(adjacencyMatrix("tags", "\t", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))))
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            adjacencyMatrix("tags", "\t", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))
+        ).get();
 
         assertNoFailures(response);
@@ -153,9 +153,9 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     // https://github.com/elastic/elasticsearch/issues/8438
     public void testEmptyFilterDeclarations() throws Exception {
         QueryBuilder emptyFilter = new BoolQueryBuilder();
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(adjacencyMatrix("tags", newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1"))))
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            adjacencyMatrix("tags", newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1")))
+        ).get();
 
         assertNoFailures(response);
@@ -173,12 +173,10 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
         BoolQueryBuilder boolQ = new BoolQueryBuilder();
         boolQ.must(termQuery("tag", "tag1"));
         boolQ.must(termQuery("tag", "tag2"));
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")).add("both", boolQ))
-                    .subAggregation(avg("avg_value").field("value"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")).add("both", boolQ))
+                .subAggregation(avg("avg_value").field("value"))
+        ).get();
 
         assertNoFailures(response);
@@ -291,7 +289,7 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
         }
 
         try {
-            client().prepareSearch("idx").addAggregation(adjacencyMatrix("tags", "\t", filtersMap)).get();
+            prepareSearch("idx").addAggregation(adjacencyMatrix("tags", "\t", filtersMap)).get();
             fail("SearchPhaseExecutionException should have been thrown");
         } catch (SearchPhaseExecutionException ex) {
             assertThat(ex.getCause().getMessage(), containsString("Number of filters is too large"));
@@ -303,11 +301,9 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     }
 
     public void testAsSubAggregation() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                histogram("histo").field("value").interval(2L).subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery())))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            histogram("histo").field("value").interval(2L).subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery())))
+        ).get();
 
         assertNoFailures(response);
@@ -327,13 +323,11 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     public void testWithContextBasedSubAggregation() throws Exception {
 
         try {
-            client().prepareSearch("idx")
-                .addAggregation(
-                    adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))).subAggregation(
-                        avg("avg_value")
-                    )
-                )
-                .get();
+            prepareSearch("idx").addAggregation(
+                adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))).subAggregation(
+                    avg("avg_value")
+                )
+            ).get();
 
             fail(
                 "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source"
@@ -346,8 +340,7 @@ public class AdjacencyMatrixIT extends AggregationIntegTestCase {
     }
 
     public void testEmptyAggregation() throws Exception {
-        SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
             .addAggregation(
                 histogram("histo").field("value")
                     .interval(1L)

@@ -97,8 +97,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
 
         logger.info("Executing search");
         TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg");
-        ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addAggregation(
                 timeSeriesAggregationBuilder.subAggregation(
                     new ScriptedMetricAggregationBuilder("sub_agg").initScript(

@@ -176,7 +176,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
     }
 
     public void testStandAloneTimeSeriesAgg() {
-        SearchResponse response = client().prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")).get();
+        SearchResponse response = prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")).get();
         assertNoFailures(response);
         Aggregations aggregations = response.getAggregations();
         assertNotNull(aggregations);
@@ -194,8 +194,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
 
     public void testTimeSeriesGroupedByADimension() {
         String groupBy = "dim_" + randomIntBetween(0, numberOfDimensions - 1);
-        SearchResponse response = client().prepareSearch("index")
-            .setSize(0)
+        SearchResponse response = prepareSearch("index").setSize(0)
             .addAggregation(
                 terms("by_dim").field(groupBy)
                     .size(data.size())
@@ -223,8 +222,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
 
     public void testTimeSeriesGroupedByDateHistogram() {
         DateHistogramInterval fixedInterval = DateHistogramInterval.days(randomIntBetween(10, 100));
-        SearchResponse response = client().prepareSearch("index")
-            .setSize(0)
+        SearchResponse response = prepareSearch("index").setSize(0)
            .addAggregation(
                dateHistogram("by_time").field("@timestamp")
                    .fixedInterval(fixedInterval)
@@ -266,11 +264,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
         if (include == false) {
             queryBuilder = QueryBuilders.boolQuery().mustNot(queryBuilder);
         }
-        SearchResponse response = client().prepareSearch("index")
-            .setQuery(queryBuilder)
-            .setSize(0)
-            .addAggregation(timeSeries("by_ts"))
-            .get();
+        SearchResponse response = prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")).get();
         assertNoFailures(response);
         Aggregations aggregations = response.getAggregations();
         assertNotNull(aggregations);
@@ -296,8 +290,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
         if (include == false) {
             queryBuilder = QueryBuilders.boolQuery().mustNot(queryBuilder);
         }
-        SearchResponse response = client().prepareSearch("index")
-            .setQuery(queryBuilder)
+        SearchResponse response = prepareSearch("index").setQuery(queryBuilder)
             .setSize(0)
             .addAggregation(timeSeries("by_ts").subAggregation(sum("filter_sum").field("metric_" + metric)))
             .addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric)))
@@ -326,8 +319,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
 
         ElasticsearchException e = expectThrows(
             ElasticsearchException.class,
-            () -> client().prepareSearch("index")
-                .setQuery(QueryBuilders.termQuery("dim_" + dim, val))
+            () -> prepareSearch("index").setQuery(QueryBuilders.termQuery("dim_" + dim, val))
                 .setSize(0)
                 .addAggregation(global("everything").subAggregation(timeSeries("by_ts")))
                 .get()
@@ -345,11 +337,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
         } else {
             queryBuilder.lte(val);
         }
-        SearchResponse response = client().prepareSearch("index")
-            .setQuery(queryBuilder)
-            .setSize(0)
-            .addAggregation(timeSeries("by_ts"))
-            .get();
+        SearchResponse response = prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")).get();
         assertNoFailures(response);
         Aggregations aggregations = response.getAggregations();
         assertNotNull(aggregations);
@@ -380,8 +368,7 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
         int expectedSize = count(filteredData);
         ElasticsearchException e = expectThrows(
             ElasticsearchException.class,
-            () -> client().prepareSearch("index")
-                .setQuery(queryBuilder)
+            () -> prepareSearch("index").setQuery(queryBuilder)
                 .setSize(expectedSize * 2)
                 .addAggregation(timeSeries("by_ts").subAggregation(topHits("hits").size(100)))
                 .addAggregation(topHits("top_hits").size(100)) // top level top hits
@@ -515,13 +502,9 @@ public class TimeSeriesAggregationsIT extends AggregationIntegTestCase {
         QueryBuilder queryBuilder = QueryBuilders.rangeQuery("@timestamp").lte("2021-01-01T00:10:00Z");
 
         assertNoFailures(
-            client().prepareSearch("test")
-                .setQuery(queryBuilder)
-                .setSize(10)
-                .addSort("key", SortOrder.ASC)
-                .addSort("@timestamp", SortOrder.ASC)
+            prepareSearch("test").setQuery(queryBuilder).setSize(10).addSort("key", SortOrder.ASC).addSort("@timestamp", SortOrder.ASC)
         );
-        assertNoFailures(client().prepareSearch("test").setQuery(queryBuilder).setSize(10).addAggregation(timeSeries("by_ts")));
+        assertNoFailures(prepareSearch("test").setQuery(queryBuilder).setSize(10).addAggregation(timeSeries("by_ts")));
 
         assertAcked(indicesAdmin().delete(new DeleteIndexRequest("test")).actionGet());
     }

@@ -155,7 +155,7 @@ public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase {
 
     public void testTimeSeriesAggregation() {
         final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts");
-        final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
+        final SearchResponse aggregationResponse = prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
         final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0);
         assertTimeSeriesAggregation(ts);
     }
@@ -164,9 +164,9 @@ public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase {
         final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation(
             new SumAggregationBuilder("sum").field("gauge_metric")
         );
-        final SearchResponse searchResponse = client().prepareSearch("index").setQuery(new MatchAllQueryBuilder()).get();
+        final SearchResponse searchResponse = prepareSearch("index").setQuery(new MatchAllQueryBuilder()).get();
         assertNotEquals(numberOfDocuments, searchResponse.getHits().getHits().length);
-        final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
+        final SearchResponse aggregationResponse = prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
         final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0);
         assertTimeSeriesAggregation(ts);
     }
@@ -175,7 +175,7 @@ public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase {
         final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation(
             new TermsAggregationBuilder("terms").field("dim_0")
         );
-        final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
+        final SearchResponse aggregationResponse = prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
         final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0);
         assertTimeSeriesAggregation(ts);
     }
@@ -184,7 +184,7 @@ public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase {
         final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation(
             new DateHistogramAggregationBuilder("date_histogram").field("@timestamp").calendarInterval(DateHistogramInterval.HOUR)
         );
-        final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
+        final SearchResponse aggregationResponse = prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
         final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0);
         assertTimeSeriesAggregation(ts);
     }
@@ -193,7 +193,7 @@ public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase {
         final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation(
             new CardinalityAggregationBuilder("dim_n_cardinality").field(formatDim(numberOfDimensions - 1))
         );
-        final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
+        final SearchResponse aggregationResponse = prepareSearch("index").addAggregation(timeSeries).setSize(0).get();
         final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0);
         assertTimeSeriesAggregation(ts);
         ts.getBuckets().forEach(bucket -> { assertCardinality(bucket.getAggregations().get("dim_n_cardinality"), 1); });

@@ -116,14 +116,12 @@ public class DateDerivativeIT extends ESIntegTestCase {
     }
 
     public void testSingleValuedField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
+        ).get();
 
         assertNoFailures(response);
@@ -161,14 +159,12 @@ public class DateDerivativeIT extends ESIntegTestCase {
     }
 
     public void testSingleValuedFieldNormalised() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.DAY))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.DAY))
+        ).get();
 
         assertNoFailures(response);
@@ -225,15 +221,13 @@ public class DateDerivativeIT extends ESIntegTestCase {
         indexRandom(true, builders);
         ensureSearchable();
 
-        SearchResponse response = client().prepareSearch(IDX_DST_START)
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.DAY)
-                    .timeZone(timezone)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR))
-            )
-            .get();
+        SearchResponse response = prepareSearch(IDX_DST_START).addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.DAY)
+                .timeZone(timezone)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR))
+        ).get();
 
         assertNoFailures(response);
@@ -283,15 +277,13 @@ public class DateDerivativeIT extends ESIntegTestCase {
         indexRandom(true, builders);
         ensureSearchable();
 
-        SearchResponse response = client().prepareSearch(IDX_DST_END)
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.DAY)
-                    .timeZone(timezone)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR))
-            )
-            .get();
+        SearchResponse response = prepareSearch(IDX_DST_END).addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.DAY)
+                .timeZone(timezone)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR))
+        ).get();
 
         assertNoFailures(response);
@@ -343,15 +335,13 @@ public class DateDerivativeIT extends ESIntegTestCase {
         indexRandom(true, builders);
         ensureSearchable();
 
-        SearchResponse response = client().prepareSearch(IDX_DST_KATHMANDU)
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.HOUR)
-                    .timeZone(timezone)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.MINUTE))
-            )
-            .get();
+        SearchResponse response = prepareSearch(IDX_DST_KATHMANDU).addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.HOUR)
+                .timeZone(timezone)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.MINUTE))
+        ).get();
 
         assertNoFailures(response);
@@ -411,15 +401,13 @@ public class DateDerivativeIT extends ESIntegTestCase {
     }
 
     public void testSingleValuedFieldWithSubAggregation() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .minDocCount(0)
-                    .subAggregation(sum("sum").field("value"))
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .minDocCount(0)
+                .subAggregation(sum("sum").field("value"))
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum"))
+        ).get();
 
         assertNoFailures(response);
@@ -495,14 +483,12 @@ public class DateDerivativeIT extends ESIntegTestCase {
     }
 
     public void testMultiValuedField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("dates")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("dates")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
+        ).get();
 
         assertNoFailures(response);
@@ -553,14 +539,12 @@ public class DateDerivativeIT extends ESIntegTestCase {
     }
 
     public void testUnmapped() throws Exception {
-        SearchResponse response = client().prepareSearch("idx_unmapped")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .minDocCount(0)
-                    .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .minDocCount(0)
+                .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count"))
+        ).get();
 
         assertNoFailures(response);

@@ -220,16 +220,14 @@ public class SerialDiffIT extends AggregationIntegTestCase {
     }
 
     public void testBasicDiff() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                histogram("histo").field(INTERVAL_FIELD)
-                    .interval(interval)
-                    .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
-                    .subAggregation(metric)
-                    .subAggregation(diff("diff_counts", "_count").lag(lag).gapPolicy(gapPolicy))
-                    .subAggregation(diff("diff_values", "the_metric").lag(lag).gapPolicy(gapPolicy))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            histogram("histo").field(INTERVAL_FIELD)
+                .interval(interval)
+                .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
+                .subAggregation(metric)
+                .subAggregation(diff("diff_counts", "_count").lag(lag).gapPolicy(gapPolicy))
+                .subAggregation(diff("diff_values", "the_metric").lag(lag).gapPolicy(gapPolicy))
+        ).get();
 
         assertNoFailures(response);
@@ -264,15 +262,13 @@ public class SerialDiffIT extends AggregationIntegTestCase {
 
     public void testInvalidLagSize() {
         try {
-            client().prepareSearch("idx")
-                .addAggregation(
-                    histogram("histo").field(INTERVAL_FIELD)
-                        .interval(interval)
-                        .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
-                        .subAggregation(metric)
-                        .subAggregation(diff("diff_counts", "_count").lag(-1).gapPolicy(gapPolicy))
-                )
-                .get();
+            prepareSearch("idx").addAggregation(
+                histogram("histo").field(INTERVAL_FIELD)
+                    .interval(interval)
+                    .extendedBounds(0L, (long) (interval * (numBuckets - 1)))
+                    .subAggregation(metric)
+                    .subAggregation(diff("diff_counts", "_count").lag(-1).gapPolicy(gapPolicy))
+            ).get();
         } catch (IllegalArgumentException e) {
             assertThat(e.getMessage(), is("[lag] must be a positive integer: [diff_counts]"));
         }

@@ -53,8 +53,7 @@ public class QueryStringWithAnalyzersIT extends ESIntegTestCase {
         refresh();
 
         assertHitCount(
-            client().prepareSearch("test")
-                .setQuery(queryStringQuery("foo.baz").defaultOperator(Operator.AND).field("field1").field("field2")),
+            prepareSearch("test").setQuery(queryStringQuery("foo.baz").defaultOperator(Operator.AND).field("field1").field("field2")),
             1L
         );
     }

@@ -83,8 +83,8 @@ public class ReloadSynonymAnalyzerIT extends ESIntegTestCase {
         client().prepareIndex("test").setId("1").setSource("field", "foo").get();
         assertNoFailures(indicesAdmin().prepareRefresh("test").execute().actionGet());
 
-        assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L);
-        assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")), 0L);
+        assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L);
+        assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")), 0L);
         Response analyzeResponse = indicesAdmin().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get();
         assertEquals(2, analyzeResponse.getTokens().size());
         assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm());
@@ -124,9 +124,9 @@ public class ReloadSynonymAnalyzerIT extends ESIntegTestCase {
                 assertTrue(tokens.contains(testTerm));
             }
 
-            assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L);
+            assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L);
             long expectedHitCount = preview ? 0L : 1L;
-            assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)), expectedHitCount);
+            assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)), expectedHitCount);
         }
     }
 }

@@ -114,8 +114,7 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
         );
         client().prepareIndex("test").setId("1").setSource("name", "ARCOTEL Hotels Deutschland").get();
         refresh();
-        SearchResponse search = client().prepareSearch("test")
-            .setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR))
+        SearchResponse search = prepareSearch("test").setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR))
             .highlighter(new HighlightBuilder().field("name.autocomplete"))
             .get();
         assertHighlight(search, 0, "name.autocomplete", 0, equalTo("ARCO<em>TEL</em> Ho<em>tel</em>s <em>Deut</em>schland"));

@@ -560,10 +560,10 @@ public class DataStreamIT extends ESIntegTestCase {
             false
         );
         verifyResolvability(dataStreamName, indicesAdmin().prepareRefresh(dataStreamName), false);
-        verifyResolvability(dataStreamName, client().prepareSearch(dataStreamName), false, 1);
+        verifyResolvability(dataStreamName, prepareSearch(dataStreamName), false, 1);
         verifyResolvability(
             dataStreamName,
-            client().prepareMultiSearch().add(client().prepareSearch(dataStreamName).setQuery(matchAllQuery())),
+            client().prepareMultiSearch().add(prepareSearch(dataStreamName).setQuery(matchAllQuery())),
             false
         );
         verifyResolvability(dataStreamName, indicesAdmin().prepareClearCache(dataStreamName), false);
@@ -606,10 +606,10 @@ public class DataStreamIT extends ESIntegTestCase {
 
         String wildcardExpression = "logs*";
         verifyResolvability(wildcardExpression, indicesAdmin().prepareRefresh(wildcardExpression), false);
-        verifyResolvability(wildcardExpression, client().prepareSearch(wildcardExpression), false, 2);
+        verifyResolvability(wildcardExpression, prepareSearch(wildcardExpression), false, 2);
         verifyResolvability(
             wildcardExpression,
-            client().prepareMultiSearch().add(client().prepareSearch(wildcardExpression).setQuery(matchAllQuery())),
+            client().prepareMultiSearch().add(prepareSearch(wildcardExpression).setQuery(matchAllQuery())),
             false
         );
         verifyResolvability(wildcardExpression, indicesAdmin().prepareClearCache(wildcardExpression), false);
@@ -754,9 +754,9 @@ public class DataStreamIT extends ESIntegTestCase {
         );
 
         // Searching the data stream directly should return all hits:
-        assertSearchHits(client().prepareSearch("logs-foobar"), "1", "2");
+        assertSearchHits(prepareSearch("logs-foobar"), "1", "2");
         // Search the alias should only return document 2, because it matches with the defined filter in the alias:
-        assertSearchHits(client().prepareSearch("foo"), "2");
+        assertSearchHits(prepareSearch("foo"), "2");
 
         // Update alias:
         addAction = new AliasActions(AliasActions.Type.ADD).index(dataStreamName)
@@ -784,9 +784,9 @@ public class DataStreamIT extends ESIntegTestCase {
         );
 
         // Searching the data stream directly should return all hits:
-        assertSearchHits(client().prepareSearch("logs-foobar"), "1", "2");
+        assertSearchHits(prepareSearch("logs-foobar"), "1", "2");
         // Search the alias should only return document 1, because it matches with the defined filter in the alias:
-        assertSearchHits(client().prepareSearch("foo"), "1");
+        assertSearchHits(prepareSearch("foo"), "1");
     }
 
     public void testSearchFilteredAndUnfilteredAlias() throws Exception {

@@ -251,8 +251,7 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT {
         BoolQueryBuilder queryBuilder = new BoolQueryBuilder().filter(new MatchQueryBuilder("name", id))
             .filter(new RangeQueryBuilder("chunk").from(metadata.firstChunk()).to(metadata.lastChunk(), true));
         int size = metadata.lastChunk() - metadata.firstChunk() + 1;
-        SearchResponse res = client().prepareSearch(GeoIpDownloader.DATABASES_INDEX)
-            .setSize(size)
+        SearchResponse res = prepareSearch(GeoIpDownloader.DATABASES_INDEX).setSize(size)
             .setQuery(queryBuilder)
             .addSort("chunk", SortOrder.ASC)
             .get();

@@ -565,30 +565,22 @@ public class MoreExpressionIT extends ESIntegTestCase {
             client().prepareIndex("agg_index").setId("4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0),
             client().prepareIndex("agg_index").setId("5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0)
         );
-        SearchResponse response = client().prepareSearch("agg_index")
-            .addAggregation(
-                histogram("histogram").field("one")
-                    .interval(2)
-                    .subAggregation(sum("twoSum").field("two"))
-                    .subAggregation(sum("threeSum").field("three"))
-                    .subAggregation(sum("fourSum").field("four"))
-                    .subAggregation(
-                        bucketScript(
-                            "totalSum",
-                            new Script(
-                                ScriptType.INLINE,
-                                ExpressionScriptEngine.NAME,
-                                "_value0 + _value1 + _value2",
-                                Collections.emptyMap()
-                            ),
-                            "twoSum",
-                            "threeSum",
-                            "fourSum"
-                        )
-                    )
-            )
-            .execute()
-            .actionGet();
+        SearchResponse response = prepareSearch("agg_index").addAggregation(
+            histogram("histogram").field("one")
+                .interval(2)
+                .subAggregation(sum("twoSum").field("two"))
+                .subAggregation(sum("threeSum").field("three"))
+                .subAggregation(sum("fourSum").field("four"))
+                .subAggregation(
+                    bucketScript(
+                        "totalSum",
+                        new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()),
+                        "twoSum",
+                        "threeSum",
+                        "fourSum"
+                    )
+                )
+        ).execute().actionGet();
 
         Histogram histogram = response.getAggregations().get("histogram");
         assertThat(histogram, notNullValue());

@@ -61,13 +61,11 @@ public class StoredExpressionIT extends ESIntegTestCase {
             assertThat(e.toString(), containsString("cannot execute scripts using [field] context"));
         }
         try {
-            client().prepareSearch("test")
-                .setSource(
-                    new SearchSourceBuilder().aggregation(
-                        AggregationBuilders.terms("test").script(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))
-                    )
-                )
-                .get();
+            prepareSearch("test").setSource(
+                new SearchSourceBuilder().aggregation(
+                    AggregationBuilders.terms("test").script(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))
+                )
+            ).get();
         } catch (Exception e) {
             assertThat(e.toString(), containsString("cannot execute scripts using [aggs] context"));
         }

@@ -74,7 +74,7 @@ public class LegacyGeoShapeIT extends GeoShapeIntegTestCase {
         }));
 
         // test self crossing of circles
-        SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))).get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))).get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
     }
 }

@@ -66,11 +66,9 @@ public class MatchOnlyTextMapperIT extends ESIntegTestCase {
         BulkResponse bulkItemResponses = bulk.get();
         assertNoFailures(bulkItemResponses);
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard"))
-            .setSize(500)
-            .highlighter(new HighlightBuilder().field("message"))
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            QueryBuilders.matchPhraseQuery("message", "marking and sending shard")
+        ).setSize(500).highlighter(new HighlightBuilder().field("message")).get();
         assertNoFailures(searchResponse);
         for (SearchHit searchHit : searchResponse.getHits()) {
             assertThat(
@@ -114,11 +112,9 @@ public class MatchOnlyTextMapperIT extends ESIntegTestCase {
         BulkResponse bulkItemResponses = bulk.get();
         assertNoFailures(bulkItemResponses);
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard"))
-            .setSize(500)
-            .highlighter(new HighlightBuilder().field("message"))
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            QueryBuilders.matchPhraseQuery("message", "marking and sending shard")
+        ).setSize(500).highlighter(new HighlightBuilder().field("message")).get();
         assertNoFailures(searchResponse);
         for (SearchHit searchHit : searchResponse.getHits()) {
             assertThat(

@@ -39,30 +39,24 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
 
     public void testRankFeaturesTermQuery() throws IOException {
         init();
-        SearchResponse response = client().prepareSearch(INDEX_NAME)
-            .setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE))
-            .get();
+        SearchResponse response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)).get();
         assertThat(response.getHits().getTotalHits().value, equalTo(2L));
         for (SearchHit hit : response.getHits().getHits()) {
             assertThat(hit.getScore(), equalTo(20f));
         }
 
-        response = client().prepareSearch(INDEX_NAME)
-            .setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f))
-            .get();
+        response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)).get();
         assertThat(response.getHits().getTotalHits().value, equalTo(2L));
         for (SearchHit hit : response.getHits().getHits()) {
             assertThat(hit.getScore(), equalTo(2000f));
         }
 
-        response = client().prepareSearch(INDEX_NAME)
-            .setQuery(
-                QueryBuilders.boolQuery()
-                    .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE))
-                    .should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f))
-                    .minimumShouldMatch(1)
-            )
-            .get();
+        response = prepareSearch(INDEX_NAME).setQuery(
+            QueryBuilders.boolQuery()
+                .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE))
+                .should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f))
+                .minimumShouldMatch(1)
+        ).get();
         assertThat(response.getHits().getTotalHits().value, equalTo(3L));
         for (SearchHit hit : response.getHits().getHits()) {
             if (hit.getId().equals("all")) {
@@ -76,7 +70,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
             }
         }
 
-        response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")).get();
+        response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")).get();
         assertThat(response.getHits().getTotalHits().value, equalTo(0L));
     }
 

@@ -175,19 +175,19 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
     }
 
     private SearchResponse searchById(String id) {
-        return prepareSearch().setQuery(QueryBuilders.termQuery("_id", id)).get();
+        return prepareTokenCountFieldMapperSearch().setQuery(QueryBuilders.termQuery("_id", id)).get();
     }
 
     private SearchRequestBuilder searchByNumericRange(int low, int high) {
-        return prepareSearch().setQuery(
+        return prepareTokenCountFieldMapperSearch().setQuery(
             QueryBuilders.rangeQuery(
                 randomFrom(Arrays.asList("foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values"))
             ).gte(low).lte(high)
         );
     }
 
-    private SearchRequestBuilder prepareSearch() {
-        SearchRequestBuilder request = client().prepareSearch("test");
+    private SearchRequestBuilder prepareTokenCountFieldMapperSearch() {
+        SearchRequestBuilder request = prepareSearch("test");
         request.addStoredField("foo.token_count");
         request.addStoredField("foo.token_count_without_position_increments");
         if (loadCountedFields) {

@@ -44,8 +44,7 @@ import static org.hamcrest.Matchers.sameInstance;
 public class ChildrenIT extends AbstractParentChildTestCase {
 
     public void testSimpleChildrenAgg() {
-        final SearchRequestBuilder searchRequest = client().prepareSearch("test")
-            .setQuery(matchQuery("randomized", true))
+        final SearchRequestBuilder searchRequest = prepareSearch("test").setQuery(matchQuery("randomized", true))
             .addAggregation(children("to_comment", "comment"));
         final SearchResponse searchResponse = searchRequest.get();
         long count = categoryToControl.values().stream().mapToLong(control -> control.commentIds.size()).sum();
@@ -55,8 +54,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
     }
 
     public void testChildrenAggs() {
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(matchQuery("randomized", true))
+        SearchResponse searchResponse = prepareSearch("test").setQuery(matchQuery("randomized", true))
             .addAggregation(
                 terms("category").field("category")
                     .size(10000)
@@ -98,8 +96,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
     }
 
     public void testParentWithMultipleBuckets() {
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(matchQuery("randomized", false))
+        SearchResponse searchResponse = prepareSearch("test").setQuery(matchQuery("randomized", false))
             .addAggregation(
                 terms("category").field("category")
                     .size(10000)
@@ -173,9 +170,9 @@ public class ChildrenIT extends AbstractParentChildTestCase {
         indexRandom(true, requests);
 
         for (int i = 0; i < 10; i++) {
-            SearchResponse searchResponse = client().prepareSearch(indexName)
-                .addAggregation(children("children", "child").subAggregation(sum("counts").field("count")))
-                .get();
+            SearchResponse searchResponse = prepareSearch(indexName).addAggregation(
+                children("children", "child").subAggregation(sum("counts").field("count"))
+            ).get();
 
             assertNoFailures(searchResponse);
             Children children = searchResponse.getAggregations().get("children");
@@ -202,7 +199,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
     }
 
     public void testNonExistingChildType() throws Exception {
-        SearchResponse searchResponse = client().prepareSearch("test").addAggregation(children("non-existing", "xyz")).get();
+        SearchResponse searchResponse = prepareSearch("test").addAggregation(children("non-existing", "xyz")).get();
         assertNoFailures(searchResponse);
 
         Children children = searchResponse.getAggregations().get("non-existing");
@@ -254,8 +251,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
         requests.add(createIndexRequest(indexName, childType, "16", "2", "color", "green", "size", "44"));
         indexRandom(true, requests);
 
-        SearchResponse response = client().prepareSearch(indexName)
-            .setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None))
+        SearchResponse response = prepareSearch(indexName).setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None))
             .addAggregation(
                 children("my-refinements", childType).subAggregation(terms("my-colors").field("color"))
                     .subAggregation(terms("my-sizes").field("size"))
@@ -304,8 +300,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
         createIndexRequest(indexName, childType, "3", "2", "name", "brussels").setRouting("1").get();
         refresh();
 
-        SearchResponse response = client().prepareSearch(indexName)
-            .setQuery(matchQuery("name", "europe"))
+        SearchResponse response = prepareSearch(indexName).setQuery(matchQuery("name", "europe"))
             .addAggregation(
                 children(parentType, parentType).subAggregation(children(childType, childType).subAggregation(terms("name").field("name")))
             )
@@ -355,8 +350,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
         requests.add(createIndexRequest("index", "childType", "8", "3", "name", "Dan", "age", 1));
         indexRandom(true, requests);
 
-        SearchResponse response = client().prepareSearch("index")
-            .setSize(0)
+        SearchResponse response = prepareSearch("index").setSize(0)
             .addAggregation(
                 AggregationBuilders.terms("towns")
                     .field("town")

@@ -33,8 +33,7 @@ import static org.hamcrest.Matchers.equalTo;
 public class ParentIT extends AbstractParentChildTestCase {
 
     public void testSimpleParentAgg() {
-        final SearchRequestBuilder searchRequest = client().prepareSearch("test")
-            .setSize(0)
+        final SearchRequestBuilder searchRequest = prepareSearch("test").setSize(0)
             .setQuery(matchQuery("randomized", true))
             .addAggregation(parent("to_article", "comment"));
         SearchResponse searchResponse = searchRequest.get();
@@ -53,8 +52,7 @@ public class ParentIT extends AbstractParentChildTestCase {
     }
 
     public void testSimpleParentAggWithSubAgg() {
-        final SearchRequestBuilder searchRequest = client().prepareSearch("test")
-            .setSize(10000)
+        final SearchRequestBuilder searchRequest = prepareSearch("test").setSize(10000)
             .setQuery(matchQuery("randomized", true))
             .addAggregation(parent("to_article", "comment").subAggregation(terms("category").field("category").size(10000)));
         SearchResponse searchResponse = searchRequest.get();
@@ -108,8 +106,7 @@ public class ParentIT extends AbstractParentChildTestCase {
     }
 
     public void testParentAggs() throws Exception {
-        final SearchRequestBuilder searchRequest = client().prepareSearch("test")
-            .setSize(10000)
+        final SearchRequestBuilder searchRequest = prepareSearch("test").setSize(10000)
             .setQuery(matchQuery("randomized", true))
             .addAggregation(
                 terms("to_commenter").field("commenter")
@@ -200,7 +197,7 @@ public class ParentIT extends AbstractParentChildTestCase {
     }
 
     public void testNonExistingParentType() throws Exception {
-        SearchResponse searchResponse = client().prepareSearch("test").addAggregation(parent("non-existing", "xyz")).get();
+        SearchResponse searchResponse = prepareSearch("test").addAggregation(parent("non-existing", "xyz")).get();
         assertNoFailures(searchResponse);
 
         Parent parent = searchResponse.getAggregations().get("non-existing");
@@ -209,8 +206,7 @@ public class ParentIT extends AbstractParentChildTestCase {
     }
 
     public void testTermsParentAggTerms() throws Exception {
-        final SearchRequestBuilder searchRequest = client().prepareSearch("test")
-            .setSize(10000)
+        final SearchRequestBuilder searchRequest = prepareSearch("test").setSize(10000)
             .setQuery(matchQuery("randomized", true))
             .addAggregation(
                 terms("to_commenter").field("commenter")

@@ -89,49 +89,43 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         createIndexRequest("test", "grandchild", "gc1", "c1", "gc_field", "gc_value1").setRouting("p1").get();
         refresh();
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(
-                boolQuery().must(matchAllQuery())
-                    .filter(
-                        hasChildQuery(
-                            "child",
-                            boolQuery().must(termQuery("c_field", "c_value1"))
-                                .filter(hasChildQuery("grandchild", termQuery("gc_field", "gc_value1"), ScoreMode.None)),
-                            ScoreMode.None
-                        )
-                    )
-            )
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            boolQuery().must(matchAllQuery())
+                .filter(
+                    hasChildQuery(
+                        "child",
+                        boolQuery().must(termQuery("c_field", "c_value1"))
+                            .filter(hasChildQuery("grandchild", termQuery("gc_field", "gc_value1"), ScoreMode.None)),
+                        ScoreMode.None
+                    )
+                )
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)))
-            .execute()
-            .actionGet();
+        searchResponse = prepareSearch("test").setQuery(
+            boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
+        ).execute().actionGet();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)))
-            .execute()
-            .actionGet();
+        searchResponse = prepareSearch("test").setQuery(
+            boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))
+        ).execute().actionGet();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
+        searchResponse = prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
             .execute()
             .actionGet();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false))
+        searchResponse = prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false))
             .execute()
             .actionGet();
         assertNoFailures(searchResponse);
@@ -148,9 +142,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         createIndexRequest("test", "foo", "1", null, "foo", 1).get();
         createIndexRequest("test", "test", "2", "1", "foo", 1).get();
         refresh();
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None))
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1"));
@@ -172,7 +164,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         // TEST FETCHING _parent from child
        SearchResponse searchResponse;
-        searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get();
+        searchResponse = prepareSearch("test").setQuery(idsQuery().addIds("c1")).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
@@ -180,9 +172,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
 
         // TEST matching on parent
-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
-            .get();
+        searchResponse = prepareSearch("test").setQuery(
+            boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
         assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2")));
@@ -193,28 +185,28 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
         assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("p1"));
 
         // HAS CHILD
-        searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")).get();
+        searchResponse = prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")).get();
         assertHitCount(searchResponse, 1L);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
 
-        searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet();
+        searchResponse = prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet();
         assertHitCount(searchResponse, 1L);
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2"));
 
-        searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).get();
+        searchResponse = prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).get();
         assertHitCount(searchResponse, 2L);
         assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
         assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 
         // HAS PARENT
-        searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value2")).get();
+        searchResponse = prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value2")).get();
         assertNoFailures(searchResponse);
         assertHitCount(searchResponse, 2L);
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3"));
         assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c4"));
 
-        searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value1")).get();
+        searchResponse = prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value1")).get();
         assertHitCount(searchResponse, 2L);
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
         assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2"));
@@ -247,10 +239,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         for (int i = 1; i <= 10; i++) {
             logger.info("Round {}", i);
-            assertNoFailures(
-                client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max)))
-            );
-            assertNoFailures(client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true))));
+            assertNoFailures(prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max))));
+            assertNoFailures(prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true))));
         }
     }
 
@@ -286,10 +276,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         assertThat(parentToChildren.isEmpty(), equalTo(false));
         for (Map.Entry<String, Set<String>> parentToChildrenEntry : parentToChildren.entrySet()) {
-            SearchResponse searchResponse = client().prepareSearch("test")
-                .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false)))
-                .setSize(numChildDocsPerParent)
-                .get();
+            SearchResponse searchResponse = prepareSearch("test").setQuery(
+                constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false))
+            ).setSize(numChildDocsPerParent).get();
 
             assertNoFailures(searchResponse);
             Set<String> childIds = parentToChildrenEntry.getValue();
@@ -323,44 +312,42 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         // HAS CHILD QUERY
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))
-            .get();
+        searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)).get();
        assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2"));
 
-        searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)).get();
+        searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
         assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
         assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 
         // HAS CHILD FILTER
-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)))
-            .get();
+        searchResponse = prepareSearch("test").setQuery(
+            constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)))
-            .get();
+        searchResponse = prepareSearch("test").setQuery(
+            constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)))
-            .get();
+        searchResponse = prepareSearch("test").setQuery(
+            constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
         assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
@@ -385,14 +372,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         refresh();
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(
-                hasChildQuery(
-                    "child",
-                    boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")),
-                    ScoreMode.None
-                )
-            )
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            hasChildQuery("child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None)
+        )
             .addAggregation(
                 AggregationBuilders.global("global")
                     .subAggregation(
@@ -431,9 +413,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
         refresh();
 
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)))
-            .get();
+        SearchResponse searchResponse = prepareSearch("test").setQuery(
+            constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))
+        ).get();
         assertNoFailures(searchResponse);
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
|
||||
|
@ -444,9 +426,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1_updated").get();
|
||||
indicesAdmin().prepareRefresh().get();
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
|
||||
|
@ -468,14 +450,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
refresh();
|
||||
|
||||
assertNoFailures(
|
||||
client().prepareSearch("test")
|
||||
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
.setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*")), ScoreMode.None)))
|
||||
);
|
||||
|
||||
assertNoFailures(
|
||||
client().prepareSearch("test")
|
||||
.setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
.setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*")), false)))
|
||||
);
|
||||
}
|
||||
|
@ -490,15 +470,15 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
client().prepareIndex("test").setId("3").setSource("p_field", 1).get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None)))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
}
|
||||
|
@ -512,24 +492,18 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get();
|
||||
refresh();
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test").setSize(0).setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)),
|
||||
1L
|
||||
);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)), 1L);
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)), 1L);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)), 1L);
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test")
|
||||
.setSize(0)
|
||||
prepareSearch("test").setSize(0)
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None))),
|
||||
1L
|
||||
);
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test")
|
||||
.setSize(0)
|
||||
.setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false))),
|
||||
prepareSearch("test").setSize(0).setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false))),
|
||||
1L
|
||||
);
|
||||
}
|
||||
|
@ -543,17 +517,13 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setExplain(true)
|
||||
SearchResponse searchResponse = prepareSearch("test").setExplain(true)
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
|
||||
.get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setExplain(true)
|
||||
.setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setExplain(true).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));
|
||||
|
||||
|
@ -617,16 +587,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
ensureGreen();
|
||||
|
||||
indexRandom(true, createDocBuilders().toArray(new IndexRequestBuilder[0]));
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Total
|
||||
)
|
||||
SearchResponse response = prepareSearch("test").setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Total
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(3L));
|
||||
assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
|
||||
|
@ -636,16 +604,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
|
||||
assertThat(response.getHits().getHits()[2].getScore(), equalTo(3f));
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Max
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Max
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(3L));
|
||||
assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
|
||||
|
@ -655,16 +621,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
assertThat(response.getHits().getHits()[2].getId(), equalTo("1"));
|
||||
assertThat(response.getHits().getHits()[2].getScore(), equalTo(2f));
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Avg
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
hasChildQuery(
|
||||
"child",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
ScoreMode.Avg
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(3L));
|
||||
assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
|
||||
|
@ -674,18 +638,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
assertThat(response.getHits().getHits()[2].getId(), equalTo("1"));
|
||||
assertThat(response.getHits().getHits()[2].getScore(), equalTo(1.5f));
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
hasParentQuery(
|
||||
"parent",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), fieldValueFactorFunction("p_field2"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
true
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
hasParentQuery(
|
||||
"parent",
|
||||
QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), fieldValueFactorFunction("p_field2"))
|
||||
.boostMode(CombineFunction.REPLACE),
|
||||
true
|
||||
)
|
||||
.addSort(SortBuilders.fieldSort("c_field3"))
|
||||
.addSort(SortBuilders.scoreSort())
|
||||
.get();
|
||||
).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()).get();
|
||||
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
||||
assertThat(response.getHits().getHits()[0].getId(), equalTo("16"));
|
||||
|
@ -709,9 +669,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
|
||||
ensureGreen();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None))
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(0L));
|
||||
|
||||
|
@ -720,19 +678,19 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
.setRefreshPolicy(RefreshPolicy.IMMEDIATE)
|
||||
.get();
|
||||
|
||||
response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
|
||||
response = prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(0L));
|
||||
|
||||
response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)).get();
|
||||
response = prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)).get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(0L));
|
||||
|
||||
response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get();
|
||||
response = prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(0L));
|
||||
|
||||
response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), true)).get();
|
||||
response = prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), true)).get();
|
||||
assertNoFailures(response);
|
||||
assertThat(response.getHits().getTotalHits().value, equalTo(0L));
|
||||
}
|
||||
|
@ -748,16 +706,16 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
client().prepareIndex("test").setId("3").setSource("p_field", 2).get();
|
||||
|
||||
refresh();
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2"));
|
||||
|
@ -771,15 +729,13 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "2", "1", "c_field", "foo bar").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
hasChildQuery("child", matchQuery("c_field", "foo"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().setHighlightBuilder(
|
||||
new HighlightBuilder().field(new Field("c_field").highlightQuery(QueryBuilders.matchQuery("c_field", "bar")))
|
||||
)
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
hasChildQuery("child", matchQuery("c_field", "foo"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().setHighlightBuilder(
|
||||
new HighlightBuilder().field(new Field("c_field").highlightQuery(QueryBuilders.matchQuery("c_field", "bar")))
|
||||
)
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
|
||||
|
@ -799,26 +755,24 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "2", "1", "c_field", 1).get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))
|
||||
).get();
|
||||
assertSearchHit(searchResponse, 1, hasId("1"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false))
|
||||
).get();
|
||||
assertSearchHit(searchResponse, 1, hasId("2"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))
|
||||
)
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))
|
||||
).get();
|
||||
assertSearchHit(searchResponse, 1, hasId("1"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false))))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))
|
||||
).get();
|
||||
assertSearchHit(searchResponse, 1, hasId("2"));
|
||||
}
|
||||
|
||||
|
@ -852,8 +806,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
|
||||
SearchType[] searchTypes = new SearchType[] { SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH };
|
||||
for (SearchType searchType : searchTypes) {
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setSearchType(searchType)
|
||||
SearchResponse searchResponse = prepareSearch("test").setSearchType(searchType)
|
||||
.setQuery(hasChildQuery("child", prefixQuery("c_field", "c"), ScoreMode.Max))
|
||||
.addSort("p_field", SortOrder.ASC)
|
||||
.setSize(5)
|
||||
|
@ -866,8 +819,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
assertThat(searchResponse.getHits().getHits()[3].getId(), equalTo("p003"));
|
||||
assertThat(searchResponse.getHits().getHits()[4].getId(), equalTo("p004"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setSearchType(searchType)
|
||||
searchResponse = prepareSearch("test").setSearchType(searchType)
|
||||
.setQuery(hasParentQuery("parent", prefixQuery("p_field", "p"), true))
|
||||
.addSort("c_field", SortOrder.ASC)
|
||||
.setSize(5)
|
||||
|
@ -897,17 +849,17 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
|
||||
assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3"));
|
||||
|
@ -922,17 +874,15 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
indicesAdmin().prepareRefresh("test").get();
|
||||
}
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
|
||||
assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
|
||||
|
@ -953,8 +903,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "c5", "p2", "c_field", "x").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total))
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total))
|
||||
.setMinScore(3) // Score needs to be 3 or above!
|
||||
.get();
|
||||
assertNoFailures(searchResponse);
|
||||
|
@ -971,8 +920,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
ensureGreen();
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))),
|
||||
prepareSearch("test").setQuery(
|
||||
boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
|
||||
),
|
||||
0L
|
||||
);
|
||||
|
||||
|
@ -980,19 +930,19 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
refresh();
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))),
|
||||
prepareSearch("test").setQuery(
|
||||
boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
|
||||
),
|
||||
1L
|
||||
);
|
||||
|
||||
createIndexRequest("test", "child", "c2", "p2").get();
|
||||
refresh();
|
||||
assertHitCount(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(
|
||||
boolQuery().should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
|
||||
.should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child")))
|
||||
),
|
||||
prepareSearch("test").setQuery(
|
||||
boolQuery().should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
|
||||
.should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child")))
|
||||
),
|
||||
2L
|
||||
);
|
||||
}
|
||||
|
@ -1007,15 +957,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "c1", "p1").get();
|
||||
refresh();
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setQuery(parentId("child", "p1")), 1L);
|
||||
assertHitCount(prepareSearch("test").setQuery(parentId("child", "p1")), 1L);
|
||||
|
||||
createIndexRequest("test", "child", "c2", "p2").get();
|
||||
refresh();
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("test").setQuery(boolQuery().should(parentId("child", "p1")).should(parentId("child", "p2"))),
|
||||
2L
|
||||
);
|
||||
assertHitCount(prepareSearch("test").setQuery(boolQuery().should(parentId("child", "p1")).should(parentId("child", "p2"))), 2L);
|
||||
}
|
||||
|
||||
public void testHasChildNotBeingCached() throws IOException {
|
||||
|
@ -1037,18 +984,18 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
indicesAdmin().prepareFlush("test").get();
|
||||
indicesAdmin().prepareRefresh("test").get();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
|
||||
createIndexRequest("test", "child", "c2", "p2", "c_field", "blue").get();
|
||||
indicesAdmin().prepareRefresh("test").get();
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
|
||||
}
|
||||
|
@ -1104,34 +1051,32 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
refresh();
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("grandissue")
|
||||
.setQuery(
|
||||
boolQuery().must(
|
||||
hasChildQuery(
|
||||
"parent",
|
||||
boolQuery().must(
|
||||
hasChildQuery("child_type_one", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
|
||||
),
|
||||
ScoreMode.None
|
||||
)
|
||||
prepareSearch("grandissue").setQuery(
|
||||
boolQuery().must(
|
||||
hasChildQuery(
|
||||
"parent",
|
||||
boolQuery().must(
|
||||
hasChildQuery("child_type_one", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
|
||||
),
|
||||
ScoreMode.None
|
||||
)
|
||||
),
|
||||
)
|
||||
),
|
||||
1L
|
||||
);
|
||||
|
||||
assertHitCount(
|
||||
client().prepareSearch("grandissue")
|
||||
.setQuery(
|
||||
boolQuery().must(
|
||||
hasChildQuery(
|
||||
"parent",
|
||||
boolQuery().must(
|
||||
hasChildQuery("child_type_two", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
|
||||
),
|
||||
ScoreMode.None
|
||||
)
|
||||
prepareSearch("grandissue").setQuery(
|
||||
boolQuery().must(
|
||||
hasChildQuery(
|
||||
"parent",
|
||||
boolQuery().must(
|
||||
hasChildQuery("child_type_two", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
|
||||
),
|
||||
ScoreMode.None
|
||||
)
|
||||
),
|
||||
)
|
||||
),
|
||||
0L
|
||||
);
|
||||
}
|
||||
|
@ -1196,21 +1141,17 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
refresh();
|
||||
|
||||
ScoreMode scoreMode = randomFrom(ScoreMode.values());
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode))
|
||||
.filter(boolQuery().mustNot(termQuery("p_field", "3")))
|
||||
)
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode))
|
||||
.filter(boolQuery().mustNot(termQuery("p_field", "3")))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode))
|
||||
.filter(boolQuery().mustNot(termQuery("p_field", "3")))
|
||||
)
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode))
|
||||
.filter(boolQuery().mustNot(termQuery("p_field", "3")))
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
|
||||
}
|
||||
|
@ -1224,30 +1165,28 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get();
|
||||
refresh();
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test"))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test")
|
||||
).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test"))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test")).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test")))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test"))
|
||||
).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test")))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test"))
|
||||
).get();
|
||||
assertHitCount(searchResponse, 1L);
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
|
||||
assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
|
||||
|
@ -1262,35 +1201,35 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
refresh();
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
|
||||
prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)).get();
|
||||
prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
|
||||
prepareSearch("test").setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get();
|
||||
prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)).get();
|
||||
prepareSearch("test").setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
|
||||
|
@ -1361,8 +1300,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false)) };
|
||||
|
||||
for (QueryBuilder query : queries) {
|
||||
SearchResponse scrollResponse = client().prepareSearch("test")
|
||||
.setScroll(TimeValue.timeValueSeconds(30))
|
||||
SearchResponse scrollResponse = prepareSearch("test").setScroll(TimeValue.timeValueSeconds(30))
|
||||
.setSize(1)
|
||||
.addStoredField("_id")
|
||||
.setQuery(query)
|
||||
|
@ -1422,7 +1360,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
scoreMode
|
||||
).minMaxChildren(minChildren, maxChildren != null ? maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN);
|
||||
|
||||
return client().prepareSearch("test").setQuery(hasChildQuery).addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get();
|
||||
return prepareSearch("test").setQuery(hasChildQuery).addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get();
|
||||
}
|
||||
|
||||
public void testMinMaxChildren() throws Exception {
|
||||
|
@ -1707,12 +1645,12 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
|
||||
// make sure that when we explicitly set a type, the inner query is executed in the context of the child type instead
|
||||
assertSearchHits(
|
||||
client().prepareSearch("test").setQuery(hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)),
|
||||
prepareSearch("test").setQuery(hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)),
|
||||
"parent-id"
|
||||
);
|
||||
// make sure that when we explicitly set a type, the inner query is executed in the context of the parent type instead
|
||||
assertSearchHits(
|
||||
client().prepareSearch("test").setQuery(hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)),
|
||||
prepareSearch("test").setQuery(hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)),
|
||||
"child-id"
|
||||
);
|
||||
}
|
||||
|
@ -1744,25 +1682,19 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
String[] highlightTypes = new String[] { "plain", "fvh", "unified" };
|
||||
for (String highlightType : highlightTypes) {
|
||||
logger.info("Testing with highlight type [{}]", highlightType);
|
||||
SearchResponse searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
|
||||
.must(new HasChildQueryBuilder("child-type", new MatchAllQueryBuilder(), ScoreMode.None))
|
||||
)
|
||||
.highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType)))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("test").setQuery(
|
||||
new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
|
||||
.must(new HasChildQueryBuilder("child-type", new MatchAllQueryBuilder(), ScoreMode.None))
|
||||
).highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))).get();
|
||||
assertHitCount(searchResponse, 1);
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("parent-id"));
|
||||
HighlightField highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText");
|
||||
assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown <em>fox</em>"));
|
||||
|
||||
searchResponse = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
|
||||
.must(new HasParentQueryBuilder("parent-type", new MatchAllQueryBuilder(), false))
|
||||
)
|
||||
.highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType)))
|
||||
.get();
|
||||
searchResponse = prepareSearch("test").setQuery(
|
||||
new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
|
||||
.must(new HasParentQueryBuilder("parent-type", new MatchAllQueryBuilder(), false))
|
||||
).highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))).get();
|
||||
assertHitCount(searchResponse, 1);
|
||||
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("child-id"));
|
||||
highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText");
|
||||
|
@ -1783,10 +1715,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
|||
);
|
||||
assertAcked(indicesAdmin().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery(), false)));
|
||||
|
||||
SearchResponse response = client().prepareSearch("filter1").get();
|
||||
SearchResponse response = prepareSearch("filter1").get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
|
||||
response = client().prepareSearch("filter2").get();
|
||||
response = prepareSearch("filter2").get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
|
||||
}
|
||||
|
|
|
@ -114,9 +114,9 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
requests.add(createIndexRequest("articles", "comment", "c6", "p2", "message", "elephant scared by mice x y"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
SearchResponse response = client().prepareSearch("articles")
|
||||
.setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder()))
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
assertSearchHit(response, 1, hasId("p1"));
|
||||
|
@ -130,13 +130,11 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
|
||||
|
||||
final boolean seqNoAndTerm = randomBoolean();
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm)
|
||||
)
|
||||
response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm)
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
assertSearchHit(response, 1, hasId("p2"));
|
||||
|
@ -165,17 +163,15 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
assertThat(innerHits.getAt(2).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
|
||||
}
|
||||
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().addFetchField("message")
|
||||
.setHighlightBuilder(new HighlightBuilder().field("message"))
|
||||
.setExplain(true)
|
||||
.setSize(1)
|
||||
.addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
|
||||
)
|
||||
response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().addFetchField("message")
|
||||
.setHighlightBuilder(new HighlightBuilder().field("message"))
|
||||
.setExplain(true)
|
||||
.setSize(1)
|
||||
.addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
|
||||
assertThat(innerHits.getHits().length, equalTo(1));
|
||||
|
@ -184,13 +180,11 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("fox eat quick"));
|
||||
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
|
||||
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().addDocValueField("message").setSize(1)
|
||||
)
|
||||
response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().addDocValueField("message").setSize(1)
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
|
||||
assertThat(innerHits.getHits().length, equalTo(1));
|
||||
|
@ -257,11 +251,7 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
)
|
||||
)
|
||||
);
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setSize(numDocs)
|
||||
.addSort("id", SortOrder.ASC)
|
||||
.setQuery(boolQuery)
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch("idx").setSize(numDocs).addSort("id", SortOrder.ASC).setQuery(boolQuery).get();
|
||||
|
||||
assertNoFailures(searchResponse);
|
||||
assertHitCount(searchResponse, numDocs);
|
||||
|
@ -330,8 +320,7 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
);
|
||||
indexRandom(true, requests);
|
||||
|
||||
SearchResponse response = client().prepareSearch("stack")
|
||||
.addSort("id", SortOrder.ASC)
|
||||
SearchResponse response = prepareSearch("stack").addSort("id", SortOrder.ASC)
|
||||
.setQuery(
|
||||
boolQuery().must(matchQuery("body", "fail2ban"))
|
||||
.must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder()))
|
||||
|
@ -373,15 +362,13 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
requests.add(createIndexRequest("articles", "remark", "6", "4", "message", "bad").setRouting("2"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
SearchResponse response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery(
|
||||
"comment",
|
||||
hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery(
|
||||
"comment",
|
||||
hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
|
@ -396,15 +383,13 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
assertThat(innerHits.getTotalHits().value, equalTo(1L));
|
||||
assertThat(innerHits.getAt(0).getId(), equalTo("5"));
|
||||
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery(
|
||||
"comment",
|
||||
hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
)
|
||||
.get();
|
||||
response = prepareSearch("articles").setQuery(
|
||||
hasChildQuery(
|
||||
"comment",
|
||||
hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
|
@ -452,26 +437,22 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
requests.add(createIndexRequest("royals", "baron", "baron4", "earl4").setRouting("king"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
SearchResponse response = client().prepareSearch("royals")
|
||||
.setQuery(
|
||||
boolQuery().filter(
|
||||
hasParentQuery(
|
||||
"prince",
|
||||
hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings")),
|
||||
false
|
||||
).innerHit(new InnerHitBuilder().setName("princes"))
|
||||
)
|
||||
.filter(
|
||||
hasChildQuery(
|
||||
"earl",
|
||||
hasChildQuery("baron", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("barons")),
|
||||
ScoreMode.None
|
||||
).innerHit(
|
||||
new InnerHitBuilder().addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC)).setName("earls").setSize(4)
|
||||
)
|
||||
)
|
||||
SearchResponse response = prepareSearch("royals").setQuery(
|
||||
boolQuery().filter(
|
||||
hasParentQuery(
|
||||
"prince",
|
||||
hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings")),
|
||||
false
|
||||
).innerHit(new InnerHitBuilder().setName("princes"))
|
||||
)
|
||||
.get();
|
||||
.filter(
|
||||
hasChildQuery(
|
||||
"earl",
|
||||
hasChildQuery("baron", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("barons")),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder().addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC)).setName("earls").setSize(4))
|
||||
)
|
||||
).get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
|
||||
|
||||
|
@ -517,12 +498,9 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
requests.add(createIndexRequest("index", "child", "5", "2", "field", "value1"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
SearchResponse response = client().prepareSearch("index")
|
||||
.setQuery(
|
||||
hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder())
|
||||
)
|
||||
.addSort("id", SortOrder.ASC)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("index").setQuery(
|
||||
hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder())
|
||||
).addSort("id", SortOrder.ASC).get();
|
||||
assertHitCount(response, 2);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L));
|
||||
|
@ -537,7 +515,7 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder()
|
||||
);
|
||||
response = client().prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC).get();
|
||||
response = prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC).get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L));
|
||||
|
@ -561,7 +539,7 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)
|
||||
);
|
||||
SearchResponse response = client().prepareSearch("index1").setQuery(query).get();
|
||||
SearchResponse response = prepareSearch("index1").setQuery(query).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
}
|
||||
|
@ -579,18 +557,16 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
createIndexRequest("test", "parent_type", "1", null, "key", "value").get();
|
||||
createIndexRequest("test", "child_type", "2", "1", "nested_type", Collections.singletonMap("key", "value")).get();
|
||||
refresh();
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
boolQuery().must(matchQuery("key", "value"))
|
||||
.should(
|
||||
hasChildQuery(
|
||||
"child_type",
|
||||
nestedQuery("nested_type", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
)
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("test").setQuery(
|
||||
boolQuery().must(matchQuery("key", "value"))
|
||||
.should(
|
||||
hasChildQuery(
|
||||
"child_type",
|
||||
nestedQuery("nested_type", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None
|
||||
).innerHit(new InnerHitBuilder())
|
||||
)
|
||||
).get();
|
||||
assertHitCount(response, 1);
|
||||
SearchHit hit = response.getHits().getAt(0);
|
||||
String parentId = (String) extractValue("join_field.parent", hit.getInnerHits().get("child_type").getAt(0).getSourceAsMap());
|
||||
|
@ -640,22 +616,19 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
createIndexRequest("index1", "child_type", "2", "1").get();
|
||||
refresh();
|
||||
assertHitCountAndNoFailures(
|
||||
client().prepareSearch("index1")
|
||||
.setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name"))
|
||||
),
|
||||
prepareSearch("index1").setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name"))
|
||||
),
|
||||
1
|
||||
);
|
||||
|
||||
Exception e = expectThrows(
|
||||
SearchPhaseExecutionException.class,
|
||||
() -> client().prepareSearch("index1")
|
||||
.setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))
|
||||
)
|
||||
.get()
|
||||
() -> prepareSearch("index1").setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))
|
||||
).get()
|
||||
);
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
|
@ -663,12 +636,10 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
);
|
||||
e = expectThrows(
|
||||
SearchPhaseExecutionException.class,
|
||||
() -> client().prepareSearch("index1")
|
||||
.setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))
|
||||
)
|
||||
.get()
|
||||
() -> prepareSearch("index1").setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))
|
||||
).get()
|
||||
);
|
||||
assertThat(
|
||||
e.getCause().getMessage(),
|
||||
|
@ -676,18 +647,16 @@ public class InnerHitsIT extends ParentChildTestCase {
|
|||
);
|
||||
updateIndexSettings(Settings.builder().put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110), "index1");
|
||||
assertNoFailures(
|
||||
client().prepareSearch("index1")
|
||||
.setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))
|
||||
)
|
||||
prepareSearch("index1").setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name"))
|
||||
)
|
||||
);
|
||||
assertNoFailures(
|
||||
client().prepareSearch("index1")
|
||||
.setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))
|
||||
)
|
||||
prepareSearch("index1").setQuery(
|
||||
hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true)
|
||||
.innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name"))
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1101,48 +1101,44 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
|
|||
|
||||
MultiSearchResponse response = client().prepareMultiSearch()
|
||||
.add(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
)
|
||||
.add(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()),
|
||||
XContentType.YAML
|
||||
)
|
||||
prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()),
|
||||
XContentType.YAML
|
||||
)
|
||||
)
|
||||
)
|
||||
.add(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
)
|
||||
.add(
|
||||
client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"query",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
)
|
||||
.add(client().prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)))
|
||||
.add(prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)))
|
||||
.add(
|
||||
client().prepareSearch("test") // non existing doc, so error element
|
||||
prepareSearch("test") // non existing doc, so error element
|
||||
.setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null))
|
||||
)
|
||||
.get();
|
||||
|
@ -1241,40 +1237,33 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
|
|||
|
||||
indicesAdmin().prepareRefresh().get();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
SearchResponse response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
).addSort("_doc", SortOrder.ASC).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = prepareSearch("test").setQuery(
|
||||
constantScoreQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
.get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
.addSort("_doc", SortOrder.ASC)
|
||||
.get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
constantScoreQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
)
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
}
|
||||
|
@ -1310,48 +1299,40 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
|
|||
.execute()
|
||||
.actionGet();
|
||||
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_simple",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
SearchResponse response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_simple",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_string",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_string",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_match",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_match",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
|
||||
response = client().prepareSearch("test")
|
||||
.setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_combo",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
response = prepareSearch("test").setQuery(
|
||||
new PercolateQueryBuilder(
|
||||
"q_combo",
|
||||
BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()),
|
||||
XContentType.JSON
|
||||
)
|
||||
.get();
|
||||
).get();
|
||||
assertEquals(1, response.getHits().getTotalHits().value);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -282,7 +282,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase {
|
|||
);
|
||||
|
||||
// Checks that the all documents have been indexed and correctly counted
|
||||
assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numDocs);
|
||||
assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numDocs);
|
||||
assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0));
|
||||
|
||||
ReindexRequestBuilder builder = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source(INDEX_NAME)
|
||||
|
|
|
@ -201,8 +201,7 @@ public class BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests extends
|
|||
// Ensure that the write thread blocking task is currently executing
|
||||
barrier.await();
|
||||
|
||||
final SearchResponse searchResponse = client().prepareSearch(sourceIndex)
|
||||
.setSize(numDocs) // Get all indexed docs
|
||||
final SearchResponse searchResponse = prepareSearch(sourceIndex).setSize(numDocs) // Get all indexed docs
|
||||
.addSort(SORTING_FIELD, SortOrder.DESC)
|
||||
.execute()
|
||||
.actionGet();
|
||||
|
|
|
@ -102,7 +102,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
);
|
||||
|
||||
// Checks that the all documents have been indexed and correctly counted
|
||||
assertHitCount(client().prepareSearch(INDEX).setSize(0), numDocs);
|
||||
assertHitCount(prepareSearch(INDEX).setSize(0), numDocs);
|
||||
assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0));
|
||||
|
||||
// Scroll by 1 so that cancellation is easier to control
|
||||
|
@ -225,7 +225,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));
|
||||
|
||||
refresh("dest");
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), modified);
|
||||
assertHitCount(prepareSearch("dest").setSize(0), modified);
|
||||
}, equalTo("reindex from [" + INDEX + "] to [dest]"));
|
||||
}
|
||||
|
||||
|
@ -241,7 +241,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
|
||||
testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX), (response, total, modified) -> {
|
||||
assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")));
|
||||
assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified);
|
||||
assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified);
|
||||
}, equalTo("update-by-query [" + INDEX + "]"));
|
||||
|
||||
assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get());
|
||||
|
@ -253,7 +253,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()),
|
||||
(response, total, modified) -> {
|
||||
assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")));
|
||||
assertHitCount(client().prepareSearch(INDEX).setSize(0), total - modified);
|
||||
assertHitCount(prepareSearch(INDEX).setSize(0), total - modified);
|
||||
},
|
||||
equalTo("delete-by-query [" + INDEX + "]")
|
||||
);
|
||||
|
@ -266,7 +266,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
(response, total, modified) -> {
|
||||
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
|
||||
refresh("dest");
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), modified);
|
||||
assertHitCount(prepareSearch("dest").setSize(0), modified);
|
||||
},
|
||||
equalTo("reindex from [" + INDEX + "] to [dest]")
|
||||
);
|
||||
|
@ -287,7 +287,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5),
|
||||
(response, total, modified) -> {
|
||||
assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
|
||||
assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified);
|
||||
assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified);
|
||||
},
|
||||
equalTo("update-by-query [" + INDEX + "]")
|
||||
);
|
||||
|
@ -301,7 +301,7 @@ public class CancelTests extends ReindexTestCase {
|
|||
deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5),
|
||||
(response, total, modified) -> {
|
||||
assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
|
||||
assertHitCount(client().prepareSearch(INDEX).setSize(0), total - modified);
|
||||
assertHitCount(prepareSearch(INDEX).setSize(0), total - modified);
|
||||
},
|
||||
equalTo("delete-by-query [" + INDEX + "]")
|
||||
);
|
||||
|
|
|
@ -61,25 +61,25 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex("test").setId("7").setSource("foo", "f")
|
||||
);
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 7);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 7);
|
||||
|
||||
// Deletes two docs that matches "foo:a"
|
||||
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2));
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 5);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 5);
|
||||
|
||||
// Deletes the two first docs with limit by size
|
||||
DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true);
|
||||
request.source().addSort("foo.keyword", SortOrder.ASC);
|
||||
assertThat(request.get(), matcher().deleted(2));
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 3);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 3);
|
||||
|
||||
// Deletes but match no docs
|
||||
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0));
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 3);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 3);
|
||||
|
||||
// Deletes all remaining docs
|
||||
assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3));
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0);
|
||||
}
|
||||
|
||||
public void testDeleteByQueryWithOneIndex() throws Exception {
|
||||
|
@ -92,7 +92,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
indexRandom(true, true, true, builders);
|
||||
|
||||
assertThat(deleteByQuery().source("t*").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(docs));
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0);
|
||||
}
|
||||
|
||||
public void testDeleteByQueryWithMultipleIndices() throws Exception {
|
||||
|
@ -122,7 +122,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
|
||||
for (int i = 0; i < indices; i++) {
|
||||
long remaining = docs - candidates[i];
|
||||
assertHitCount(client().prepareSearch("test-" + i).setSize(0), remaining);
|
||||
assertHitCount(prepareSearch("test-" + i).setSize(0), remaining);
|
||||
}
|
||||
|
||||
assertHitCount(client().prepareSearch().setSize(0), (indices * docs) - deletions);
|
||||
|
@ -186,13 +186,13 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
indexRandom(true, true, true, builders);
|
||||
|
||||
int n = between(0, docs - 1);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))), 1);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))), 1);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs);
|
||||
|
||||
DeleteByQueryRequestBuilder delete = deleteByQuery().source("alias").filter(matchQuery("_id", Integer.toString(n)));
|
||||
assertThat(delete.refresh(true).get(), matcher().deleted(1L));
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs - 1);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs - 1);
|
||||
}
|
||||
|
||||
public void testDeleteByQueryWithDateMath() throws Exception {
|
||||
|
@ -201,7 +201,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h"));
|
||||
assertThat(delete.refresh(true).get(), matcher().deleted(1L));
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0);
|
||||
}
|
||||
|
||||
public void testDeleteByQueryOnReadOnlyIndex() throws Exception {
|
||||
|
@ -224,7 +224,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
disableIndexBlock("test", SETTING_READ_ONLY);
|
||||
}
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), docs);
|
||||
assertHitCount(prepareSearch("test").setSize(0), docs);
|
||||
}
|
||||
|
||||
public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception {
|
||||
|
@ -280,9 +280,9 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
}
|
||||
}
|
||||
if (diskAllocationDeciderEnabled) {
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0);
|
||||
} else {
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), docs);
|
||||
assertHitCount(prepareSearch("test").setSize(0), docs);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -297,7 +297,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex("test").setId("6").setSource("foo", "e"),
|
||||
client().prepareIndex("test").setId("7").setSource("foo", "f")
|
||||
);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 7);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 7);
|
||||
|
||||
int slices = randomSlices();
|
||||
int expectedSlices = expectedSliceStatuses(slices, "test");
|
||||
|
@ -307,14 +307,14 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(),
|
||||
matcher().deleted(2).slices(hasSize(expectedSlices))
|
||||
);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 5);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 5);
|
||||
|
||||
// Delete remaining docs
|
||||
assertThat(
|
||||
deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(),
|
||||
matcher().deleted(5).slices(hasSize(expectedSlices))
|
||||
);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0);
|
||||
}
|
||||
|
||||
public void testMultipleSources() throws Exception {
|
||||
|
@ -333,7 +333,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
List<IndexRequestBuilder> allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
|
||||
indexRandom(true, allDocs);
|
||||
for (Map.Entry<String, List<IndexRequestBuilder>> entry : docs.entrySet()) {
|
||||
assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
}
|
||||
|
||||
int slices = randomSlices(1, 10);
|
||||
|
@ -347,7 +347,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
|
|||
);
|
||||
|
||||
for (String index : docs.keySet()) {
|
||||
assertHitCount(client().prepareSearch(index).setSize(0), 0);
|
||||
assertHitCount(prepareSearch(index).setSize(0), 0);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ public class DeleteByQueryConcurrentTests extends ReindexTestCase {
|
|||
final CountDownLatch start = new CountDownLatch(1);
|
||||
for (int t = 0; t < threads.length; t++) {
|
||||
final int threadNum = t;
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)), docs);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)), docs);
|
||||
|
||||
Runnable r = () -> {
|
||||
try {
|
||||
|
@ -64,7 +64,7 @@ public class DeleteByQueryConcurrentTests extends ReindexTestCase {
|
|||
}
|
||||
|
||||
for (int t = 0; t < threads.length; t++) {
|
||||
assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)), 0);
|
||||
assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)), 0);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -105,7 +105,7 @@ public class DeleteByQueryConcurrentTests extends ReindexTestCase {
|
|||
thread.join();
|
||||
}
|
||||
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 0L);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 0L);
|
||||
assertThat(deleted.get(), equalTo(docs));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,28 +35,28 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex("source").setId("3").setSource("foo", "b"),
|
||||
client().prepareIndex("source").setId("4").setSource("foo", "c")
|
||||
);
|
||||
assertHitCount(client().prepareSearch("source").setSize(0), 4);
|
||||
assertHitCount(prepareSearch("source").setSize(0), 4);
|
||||
|
||||
// Copy all the docs
|
||||
ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true);
|
||||
assertThat(copy.get(), matcher().created(4));
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), 4);
|
||||
assertHitCount(prepareSearch("dest").setSize(0), 4);
|
||||
|
||||
// Now none of them
|
||||
createIndex("none");
|
||||
copy = reindex().source("source").destination("none").filter(termQuery("foo", "no_match")).refresh(true);
|
||||
assertThat(copy.get(), matcher().created(0));
|
||||
assertHitCount(client().prepareSearch("none").setSize(0), 0);
|
||||
assertHitCount(prepareSearch("none").setSize(0), 0);
|
||||
|
||||
// Now half of them
|
||||
copy = reindex().source("source").destination("dest_half").filter(termQuery("foo", "a")).refresh(true);
|
||||
assertThat(copy.get(), matcher().created(2));
|
||||
assertHitCount(client().prepareSearch("dest_half").setSize(0), 2);
|
||||
assertHitCount(prepareSearch("dest_half").setSize(0), 2);
|
||||
|
||||
// Limit with maxDocs
|
||||
copy = reindex().source("source").destination("dest_size_one").maxDocs(1).refresh(true);
|
||||
assertThat(copy.get(), matcher().created(1));
|
||||
assertHitCount(client().prepareSearch("dest_size_one").setSize(0), 1);
|
||||
assertHitCount(prepareSearch("dest_size_one").setSize(0), 1);
|
||||
}
|
||||
|
||||
public void testCopyMany() throws Exception {
|
||||
|
@ -67,14 +67,14 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
}
|
||||
|
||||
indexRandom(true, docs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(0), max);
|
||||
assertHitCount(prepareSearch("source").setSize(0), max);
|
||||
|
||||
// Copy all the docs
|
||||
ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true);
|
||||
// Use a small batch size so we have to use more than one batch
|
||||
copy.source().setSize(5);
|
||||
assertThat(copy.get(), matcher().created(max).batches(max, 5));
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), max);
|
||||
assertHitCount(prepareSearch("dest").setSize(0), max);
|
||||
|
||||
// Copy some of the docs
|
||||
int half = max / 2;
|
||||
|
@ -83,7 +83,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
copy.source().setSize(5);
|
||||
copy.maxDocs(half);
|
||||
assertThat(copy.get(), matcher().created(half).batches(half, 5));
|
||||
assertHitCount(client().prepareSearch("dest_half").setSize(0), half);
|
||||
assertHitCount(prepareSearch("dest_half").setSize(0), half);
|
||||
}
|
||||
|
||||
public void testCopyManyWithSlices() throws Exception {
|
||||
|
@ -94,7 +94,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
}
|
||||
|
||||
indexRandom(true, docs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(0), max);
|
||||
assertHitCount(prepareSearch("source").setSize(0), max);
|
||||
|
||||
int slices = randomSlices();
|
||||
int expectedSlices = expectedSliceStatuses(slices, "source");
|
||||
|
@ -104,7 +104,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
// Use a small batch size so we have to use more than one batch
|
||||
copy.source().setSize(5);
|
||||
assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices)));
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), max);
|
||||
assertHitCount(prepareSearch("dest").setSize(0), max);
|
||||
|
||||
// Copy some of the docs
|
||||
int half = max / 2;
|
||||
|
@ -114,7 +114,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
copy.maxDocs(half);
|
||||
BulkByScrollResponse response = copy.get();
|
||||
assertThat(response, matcher().created(lessThanOrEqualTo((long) half)).slices(hasSize(expectedSlices)));
|
||||
assertHitCount(client().prepareSearch("dest_half").setSize(0), response.getCreated());
|
||||
assertHitCount(prepareSearch("dest_half").setSize(0), response.getCreated());
|
||||
}
|
||||
|
||||
public void testMultipleSources() throws Exception {
|
||||
|
@ -134,7 +134,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
List<IndexRequestBuilder> allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
|
||||
indexRandom(true, allDocs);
|
||||
for (Map.Entry<String, List<IndexRequestBuilder>> entry : docs.entrySet()) {
|
||||
assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
}
|
||||
|
||||
int slices = randomSlices(1, 10);
|
||||
|
@ -145,7 +145,7 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
|
||||
BulkByScrollResponse response = request.get();
|
||||
assertThat(response, matcher().created(allDocs.size()).slices(hasSize(expectedSlices)));
|
||||
assertHitCount(client().prepareSearch("dest").setSize(0), allDocs.size());
|
||||
assertHitCount(prepareSearch("dest").setSize(0), allDocs.size());
|
||||
}
|
||||
|
||||
public void testMissingSources() {
|
||||
|
@ -166,12 +166,12 @@ public class ReindexBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex(sourceIndexName).setId("3").setSource("foo", "b"),
|
||||
client().prepareIndex(sourceIndexName).setId("4").setSource("foo", "c")
|
||||
);
|
||||
assertHitCount(client().prepareSearch(sourceIndexName).setSize(0), 4);
|
||||
assertHitCount(prepareSearch(sourceIndexName).setSize(0), 4);
|
||||
|
||||
// Copy all the docs
|
||||
ReindexRequestBuilder copy = reindex().source(sourceIndexName).destination(destIndexName).refresh(true);
|
||||
assertThat(copy.get(), matcher().created(4));
|
||||
assertHitCount(client().prepareSearch(destIndexName).setSize(0), 4);
|
||||
assertHitCount(prepareSearch(destIndexName).setSize(0), 4);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -128,7 +128,7 @@ public class ReindexFailureTests extends ReindexTestCase {
|
|||
client().prepareIndex(sourceIndexName).setId("3").setSource("foo", "b"),
|
||||
client().prepareIndex(sourceIndexName).setId("4").setSource("foo", "c")
|
||||
);
|
||||
assertHitCount(client().prepareSearch(sourceIndexName).setSize(0), 4);
|
||||
assertHitCount(prepareSearch(sourceIndexName).setSize(0), 4);
|
||||
|
||||
ActionRequestValidationException e = expectThrows(
|
||||
ActionRequestValidationException.class,
|
||||
|
|
|
@ -34,7 +34,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex("test").setId("3").setSource("foo", "b"),
|
||||
client().prepareIndex("test").setId("4").setSource("foo", "c")
|
||||
);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 4);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 4);
|
||||
assertEquals(1, client().prepareGet("test", "1").get().getVersion());
|
||||
assertEquals(1, client().prepareGet("test", "4").get().getVersion());
|
||||
|
||||
|
@ -74,7 +74,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
|
|||
client().prepareIndex("test").setId("3").setSource("foo", "b"),
|
||||
client().prepareIndex("test").setId("4").setSource("foo", "c")
|
||||
);
|
||||
assertHitCount(client().prepareSearch("test").setSize(0), 4);
|
||||
assertHitCount(prepareSearch("test").setSize(0), 4);
|
||||
assertEquals(1, client().prepareGet("test", "1").get().getVersion());
|
||||
assertEquals(1, client().prepareGet("test", "4").get().getVersion());
|
||||
|
||||
|
@ -124,7 +124,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
|
|||
List<IndexRequestBuilder> allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList());
|
||||
indexRandom(true, allDocs);
|
||||
for (Map.Entry<String, List<IndexRequestBuilder>> entry : docs.entrySet()) {
|
||||
assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size());
|
||||
}
|
||||
|
||||
int slices = randomSlices(1, 10);
|
||||
|
|
|
@ -198,7 +198,7 @@ public class S3BlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTes
|
|||
flushAndRefresh(index);
|
||||
ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get();
|
||||
assertThat(forceMerge.getSuccessfulShards(), equalTo(1));
|
||||
assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
|
||||
// Intentionally fail snapshot to trigger abortMultipartUpload requests
|
||||
shouldFailCompleteMultipartUploadRequest.set(true);
|
||||
|
@ -240,14 +240,14 @@ public class S3BlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTes
|
|||
flushAndRefresh(index);
|
||||
ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get();
|
||||
assertThat(forceMerge.getSuccessfulShards(), equalTo(1));
|
||||
assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
|
||||
final String snapshot = "snapshot";
|
||||
assertSuccessfulSnapshot(clusterAdmin().prepareCreateSnapshot(repository, snapshot).setWaitForCompletion(true).setIndices(index));
|
||||
assertAcked(client().admin().indices().prepareDelete(index));
|
||||
assertSuccessfulRestore(clusterAdmin().prepareRestoreSnapshot(repository, snapshot).setWaitForCompletion(true));
|
||||
ensureGreen(index);
|
||||
assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs);
|
||||
assertAcked(clusterAdmin().prepareDeleteSnapshot(repository, snapshot).get());
|
||||
|
||||
final Map<String, Long> aggregatedMetrics = new HashMap<>();
|
||||
|
|
|
@ -110,15 +110,15 @@ public class SizeMappingIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").setMapping("_size", "enabled=true"));
|
||||
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
|
||||
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
|
||||
SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get();
|
||||
SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
|
||||
assertEquals(source.length(), ((Long) searchResponse.getHits().getHits()[0].getFields().get("_size").getValue()).intValue());
|
||||
|
||||
// this should not work when requesting fields via wildcard expression
|
||||
searchResponse = client().prepareSearch("test").addFetchField("*").get();
|
||||
searchResponse = prepareSearch("test").addFetchField("*").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
|
||||
// This should STILL work
|
||||
searchResponse = client().prepareSearch("test").addStoredField("*").get();
|
||||
searchResponse = prepareSearch("test").addStoredField("*").get();
|
||||
assertNotNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
}
|
||||
|
||||
|
@ -126,13 +126,13 @@ public class SizeMappingIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test").setMapping("_size", "enabled=false"));
|
||||
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
|
||||
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
|
||||
SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get();
|
||||
SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
|
||||
searchResponse = client().prepareSearch("test").addFetchField("*").get();
|
||||
searchResponse = prepareSearch("test").addFetchField("*").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
|
||||
searchResponse = client().prepareSearch("test").addStoredField("*").get();
|
||||
searchResponse = prepareSearch("test").addStoredField("*").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
}
|
||||
|
||||
|
@ -140,13 +140,13 @@ public class SizeMappingIT extends ESIntegTestCase {
|
|||
assertAcked(prepareCreate("test"));
|
||||
final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}";
|
||||
indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON));
|
||||
SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get();
|
||||
SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
|
||||
searchResponse = client().prepareSearch("test").addFetchField("*").get();
|
||||
searchResponse = prepareSearch("test").addFetchField("*").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
|
||||
searchResponse = client().prepareSearch("test").addStoredField("*").get();
|
||||
searchResponse = prepareSearch("test").addStoredField("*").get();
|
||||
assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size"));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,7 +32,7 @@ public abstract class AbstractAzureFsTestCase extends ESIntegTestCase {
|
|||
indexDoc("test", "" + i, "foo", "bar");
|
||||
}
|
||||
refresh();
|
||||
SearchResponse response = client().prepareSearch("test").get();
|
||||
SearchResponse response = prepareSearch("test").get();
|
||||
assertThat(response.getHits().getTotalHits().value, is(nbDocs));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,8 +50,7 @@ public class RejectionActionIT extends ESIntegTestCase {
|
|||
final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps);
|
||||
final CopyOnWriteArrayList<Object> responses = new CopyOnWriteArrayList<>();
|
||||
for (int i = 0; i < numberOfAsyncOps; i++) {
|
||||
client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.setQuery(QueryBuilders.matchQuery("field", "1"))
|
||||
.execute(new LatchedActionListener<>(new ActionListener<SearchResponse>() {
|
||||
@Override
|
||||
|
|
|
@ -782,15 +782,15 @@ public class TasksIT extends ESIntegTestCase {
|
|||
|
||||
assertNoFailures(indicesAdmin().prepareRefresh(TaskResultsService.TASK_INDEX).get());
|
||||
|
||||
SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
|
||||
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.action())))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch(TaskResultsService.TASK_INDEX).setSource(
|
||||
SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.action()))
|
||||
).get();
|
||||
|
||||
assertEquals(1L, searchResponse.getHits().getTotalHits().value);
|
||||
|
||||
searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
|
||||
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.taskId().getNodeId())))
|
||||
.get();
|
||||
searchResponse = prepareSearch(TaskResultsService.TASK_INDEX).setSource(
|
||||
SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.taskId().getNodeId()))
|
||||
).get();
|
||||
|
||||
assertEquals(1L, searchResponse.getHits().getTotalHits().value);
|
||||
|
||||
|
|
|
@ -80,21 +80,21 @@ public class CloneIndexIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
final int size = docs > 0 ? 2 * docs : 1;
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
|
||||
if (createWithReplicas == false) {
|
||||
// bump replicas
|
||||
setReplicaCount(1, "target");
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
}
|
||||
|
||||
for (int i = docs; i < 2 * docs; i++) {
|
||||
client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get();
|
||||
assertThat(
|
||||
target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId),
|
||||
|
|
|
@ -270,11 +270,10 @@ public class CreateIndexIT extends ESIntegTestCase {
|
|||
|
||||
// we only really assert that we never reuse segments of old indices or anything like this here and that nothing fails with
|
||||
// crazy exceptions
|
||||
SearchResponse expected = client().prepareSearch("test")
|
||||
.setIndicesOptions(IndicesOptions.lenientExpandOpen())
|
||||
SearchResponse expected = prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen())
|
||||
.setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true))
|
||||
.get();
|
||||
SearchResponse all = client().prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get();
|
||||
SearchResponse all = prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get();
|
||||
assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value);
|
||||
logger.info("total: {}", expected.getHits().getTotalHits().value);
|
||||
}
|
||||
|
|
|
@ -107,7 +107,7 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
.get()
|
||||
);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
|
||||
for (int i = 0; i < 20; i++) { // now update
|
||||
client().prepareIndex("first_shrink")
|
||||
|
@ -116,8 +116,8 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
.get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
|
||||
// relocate all shards to one node such that we can merge it.
|
||||
updateIndexSettings(
|
||||
|
@ -134,14 +134,14 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
.get()
|
||||
);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
// let it be allocated anywhere and bump replicas
|
||||
updateIndexSettings(
|
||||
Settings.builder().putNull("index.routing.allocation.include._id").put("index.number_of_replicas", 1),
|
||||
"second_shrink"
|
||||
);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
|
||||
for (int i = 0; i < 20; i++) { // now update
|
||||
client().prepareIndex("second_shrink")
|
||||
|
@ -150,9 +150,9 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
.get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
|
||||
assertNoResizeSourceIndexSettings("first_shrink");
|
||||
assertNoResizeSourceIndexSettings("second_shrink");
|
||||
|
@ -311,21 +311,21 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
final int size = docs > 0 ? 2 * docs : 1;
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
|
||||
if (createWithReplicas == false) {
|
||||
// bump replicas
|
||||
setReplicaCount(1, "target");
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
}
|
||||
|
||||
for (int i = docs; i < 2 * docs; i++) {
|
||||
client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get();
|
||||
assertThat(
|
||||
target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId),
|
||||
|
@ -409,7 +409,7 @@ public class ShrinkIndexIT extends ESIntegTestCase {
|
|||
// we support the expected shard size in the allocator to sum up over the source index shards
|
||||
assertTrue("expected shard size must be set but wasn't: " + expectedShardSize, expectedShardSize > 0);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertHitCount(prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20);
|
||||
assertNoResizeSourceIndexSettings("target");
|
||||
}
|
||||
|
||||
|
|
|
@ -189,7 +189,7 @@ public class SplitIndexIT extends ESIntegTestCase {
|
|||
.get()
|
||||
);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertNoResizeSourceIndexSettings("first_split");
|
||||
|
||||
for (int i = 0; i < numDocs; i++) { // now update
|
||||
|
@ -200,8 +200,8 @@ public class SplitIndexIT extends ESIntegTestCase {
|
|||
builder.get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
for (int i = 0; i < numDocs; i++) {
|
||||
GetResponse getResponse = client().prepareGet("first_split", Integer.toString(i)).setRouting(routingValue[i]).get();
|
||||
assertTrue(getResponse.isExists());
|
||||
|
@ -217,13 +217,13 @@ public class SplitIndexIT extends ESIntegTestCase {
|
|||
.get()
|
||||
);
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertNoResizeSourceIndexSettings("second_split");
|
||||
|
||||
// let it be allocated anywhere and bump replicas
|
||||
setReplicaCount(1, "second_split");
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
|
||||
for (int i = 0; i < numDocs; i++) { // now update
|
||||
IndexRequestBuilder builder = indexFunc.apply("second_split", i);
|
||||
|
@ -237,30 +237,24 @@ public class SplitIndexIT extends ESIntegTestCase {
|
|||
GetResponse getResponse = client().prepareGet("second_split", Integer.toString(i)).setRouting(routingValue[i]).get();
|
||||
assertTrue(getResponse.isExists());
|
||||
}
|
||||
assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs);
|
||||
if (useNested) {
|
||||
assertNested("source", numDocs);
|
||||
assertNested("first_split", numDocs);
|
||||
assertNested("second_split", numDocs);
|
||||
}
|
||||
assertAllUniqueDocs(
|
||||
client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(),
|
||||
numDocs
|
||||
);
|
||||
assertAllUniqueDocs(
|
||||
client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(),
|
||||
numDocs
|
||||
);
|
||||
assertAllUniqueDocs(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
|
||||
assertAllUniqueDocs(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
|
||||
assertAllUniqueDocs(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
|
||||
assertAllUniqueDocs(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs);
|
||||
}
|
||||
|
||||
public void assertNested(String index, int numDocs) {
|
||||
// now, do a nested query
|
||||
SearchResponse searchResponse = client().prepareSearch(index)
|
||||
.setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg))
|
||||
.get();
|
||||
SearchResponse searchResponse = prepareSearch(index).setQuery(
|
||||
nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)
|
||||
).get();
|
||||
assertNoFailures(searchResponse);
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs));
|
||||
}
|
||||
|
@ -410,21 +404,21 @@ public class SplitIndexIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
final int size = docs > 0 ? 2 * docs : 1;
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
|
||||
if (createWithReplicas == false) {
|
||||
// bump replicas
|
||||
setReplicaCount(1, "target");
|
||||
ensureGreen();
|
||||
assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
}
|
||||
|
||||
for (int i = docs; i < 2 * docs; i++) {
|
||||
client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get();
|
||||
}
|
||||
flushAndRefresh();
|
||||
assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs);
|
||||
assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs);
|
||||
GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get();
|
||||
assertThat(
|
||||
target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId),
|
||||
|
|
|
@ -136,7 +136,7 @@ public class BulkProcessor2RetryIT extends ESIntegTestCase {
|
|||
|
||||
indicesAdmin().refresh(new RefreshRequest()).get();
|
||||
|
||||
SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
|
||||
SearchResponse results = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
|
||||
assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L));
|
||||
if (rejectedExecutionExpected) {
|
||||
assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
|
||||
|
|
|
@ -131,7 +131,7 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
|
|||
|
||||
indicesAdmin().refresh(new RefreshRequest()).get();
|
||||
|
||||
SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
|
||||
SearchResponse results = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
|
||||
|
||||
if (rejectedExecutionExpected) {
|
||||
assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
|
||||
|
|
|
@ -634,7 +634,7 @@ public class BulkWithUpdatesIT extends ESIntegTestCase {
|
|||
.setRefreshPolicy(RefreshPolicy.IMMEDIATE);
|
||||
|
||||
client().bulk(bulkRequest).get();
|
||||
assertHitCount(client().prepareSearch("bulkindex*"), 3);
|
||||
assertHitCount(prepareSearch("bulkindex*"), 3);
|
||||
|
||||
assertBusy(() -> assertAcked(indicesAdmin().prepareClose("bulkindex2")));
|
||||
|
||||
|
|
|
@ -42,7 +42,7 @@ public class WriteAckDelayIT extends ESIntegTestCase {
|
|||
for (int j = 0; j < numOfChecks; j++) {
|
||||
try {
|
||||
logger.debug("running search");
|
||||
SearchResponse response = client().prepareSearch("test").get();
|
||||
SearchResponse response = prepareSearch("test").get();
|
||||
if (response.getHits().getTotalHits().value != numOfDocs) {
|
||||
final String message = "Count is "
|
||||
+ response.getHits().getTotalHits().value
|
||||
|
|
|
@ -132,8 +132,7 @@ public class LookupRuntimeFieldIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testBasic() {
|
||||
SearchResponse searchResponse = client().prepareSearch("books")
|
||||
.addFetchField("author")
|
||||
SearchResponse searchResponse = prepareSearch("books").addFetchField("author")
|
||||
.addFetchField("title")
|
||||
.addSort("published_date", SortOrder.DESC)
|
||||
.setSize(3)
|
||||
|
@ -169,18 +168,17 @@ public class LookupRuntimeFieldIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testLookupMultipleIndices() throws IOException {
|
||||
SearchResponse searchResponse = client().prepareSearch("books")
|
||||
.setRuntimeMappings(parseMapping("""
|
||||
{
|
||||
"publisher": {
|
||||
"type": "lookup",
|
||||
"target_index": "publishers",
|
||||
"input_field": "publisher_id",
|
||||
"target_field": "_id",
|
||||
"fetch_fields": ["name", "city"]
|
||||
}
|
||||
SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping("""
|
||||
{
|
||||
"publisher": {
|
||||
"type": "lookup",
|
||||
"target_index": "publishers",
|
||||
"input_field": "publisher_id",
|
||||
"target_field": "_id",
|
||||
"fetch_fields": ["name", "city"]
|
||||
}
|
||||
"""))
|
||||
}
|
||||
"""))
|
||||
.setFetchSource(false)
|
||||
.addFetchField("title")
|
||||
.addFetchField("author")
|
||||
|
@ -217,7 +215,7 @@ public class LookupRuntimeFieldIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testFetchField() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("books").setRuntimeMappings(parseMapping("""
|
||||
SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping("""
|
||||
{
|
||||
"author": {
|
||||
"type": "lookup",
|
||||
|
|
|
@ -96,7 +96,7 @@ public class PointInTimeIT extends ESIntegTestCase {
|
|||
}
|
||||
refresh("test");
|
||||
if (randomBoolean()) {
|
||||
SearchResponse resp2 = client().prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get();
|
||||
SearchResponse resp2 = prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get();
|
||||
assertNoFailures(resp2);
|
||||
assertHitCount(resp2, numDocs - deletedDocs);
|
||||
}
|
||||
|
@ -259,7 +259,7 @@ public class PointInTimeIT extends ESIntegTestCase {
|
|||
assertHitCount(resp, index1 + index2);
|
||||
indicesAdmin().prepareDelete("index-1").get();
|
||||
if (randomBoolean()) {
|
||||
resp = client().prepareSearch("index-*").get();
|
||||
resp = prepareSearch("index-*").get();
|
||||
assertNoFailures(resp);
|
||||
assertHitCount(resp, index2);
|
||||
}
|
||||
|
|
|
@@ -310,12 +310,11 @@ public class TransportSearchIT extends ESIntegTestCase {
Arrays.fill(validCheckpoints, SequenceNumbers.UNASSIGNED_SEQ_NO);

// no exception
client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get();
prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get();

IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("testFailedAlias")
.setWaitForCheckpoints(Collections.singletonMap("testFailedAlias", validCheckpoints))
() -> prepareSearch("testFailedAlias").setWaitForCheckpoints(Collections.singletonMap("testFailedAlias", validCheckpoints))
.get()
);
assertThat(

@@ -328,7 +327,7 @@ public class TransportSearchIT extends ESIntegTestCase {

IllegalArgumentException e2 = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("test1").setWaitForCheckpoints(Collections.singletonMap("test1", new long[2])).get()
() -> prepareSearch("test1").setWaitForCheckpoints(Collections.singletonMap("test1", new long[2])).get()
);
assertThat(
e2.getMessage(),

@@ -342,7 +341,7 @@ public class TransportSearchIT extends ESIntegTestCase {

IllegalArgumentException e3 = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", new long[2])).get()
() -> prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", new long[2])).get()
);
assertThat(
e3.getMessage(),

@@ -356,7 +355,7 @@ public class TransportSearchIT extends ESIntegTestCase {

IllegalArgumentException e4 = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("test2", validCheckpoints)).get()
() -> prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("test2", validCheckpoints)).get()
);
assertThat(
e4.getMessage(),

@@ -375,11 +374,11 @@ public class TransportSearchIT extends ESIntegTestCase {
assertAcked(prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numPrimaries2)));

// no exception
client().prepareSearch("test1").get();
prepareSearch("test1").get();

updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1));

IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test1").get());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1").get());
assertThat(
e.getMessage(),
containsString("Trying to query " + numPrimaries1 + " shards, which is over the limit of " + (numPrimaries1 - 1))

@@ -388,7 +387,7 @@ public class TransportSearchIT extends ESIntegTestCase {
updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1));

// no exception
client().prepareSearch("test1").get();
prepareSearch("test1").get();

e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test1", "test2").get());
assertThat(

@@ -425,8 +424,7 @@ public class TransportSearchIT extends ESIntegTestCase {
client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get();
client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get();
assertBusy(() -> {
SearchResponse resp = client().prepareSearch("test")
.setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03"))
SearchResponse resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03"))
.setPreFilterShardSize(randomIntBetween(1, 3))
.get();
assertThat(resp.getHits().getTotalHits().value, equalTo(2L));

@@ -442,8 +440,7 @@ public class TransportSearchIT extends ESIntegTestCase {
final CountDownLatch latch = new CountDownLatch(10);
for (int i = 0; i < 10; i++) {
int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3));
SearchRequest request = client().prepareSearch("test")
.addAggregation(new TestAggregationBuilder("test"))
SearchRequest request = prepareSearch("test").addAggregation(new TestAggregationBuilder("test"))
.setBatchedReduceSize(batchReduceSize)
.request();
final int index = i;

@@ -484,8 +481,7 @@ public class TransportSearchIT extends ESIntegTestCase {
final CountDownLatch latch = new CountDownLatch(10);
for (int i = 0; i < 10; i++) {
int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3));
SearchRequest request = client().prepareSearch("test")
.addAggregation(new TestAggregationBuilder("test"))
SearchRequest request = prepareSearch("test").addAggregation(new TestAggregationBuilder("test"))
.setBatchedReduceSize(batchReduceSize)
.request();
final int index = i;

@@ -522,8 +518,7 @@ public class TransportSearchIT extends ESIntegTestCase {
final CountDownLatch latch = new CountDownLatch(10);
for (int i = 0; i < 10; i++) {
int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3));
SearchRequest request = client().prepareSearch("boom")
.setBatchedReduceSize(batchReduceSize)
SearchRequest request = prepareSearch("boom").setBatchedReduceSize(batchReduceSize)
.setAllowPartialSearchResults(false)
.request();
final int index = i;
@@ -97,7 +97,7 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase {

ensureGreen("myindex");
refresh();
assertThat(client().prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L));
assertThat(prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L));
}

}
@@ -269,23 +269,22 @@ public class IndexAliasesIT extends ESIntegTestCase {
).actionGet();

logger.info("--> checking single filtering alias search");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
SearchResponse searchResponse = prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");

logger.info("--> checking single filtering alias wildcard search");
searchResponse = client().prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1");

searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3");

logger.info("--> checking single filtering alias search with sort");
searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC).get();
searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC).get();
assertHits(searchResponse.getHits(), "1", "2", "3");

logger.info("--> checking single filtering alias search with global facets");
searchResponse = client().prepareSearch("tests")
.setQuery(QueryBuilders.matchQuery("name", "bar"))
searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("test").field("name")))
.get();
assertNoFailures(searchResponse);

@@ -294,8 +293,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(terms.getBuckets().size(), equalTo(4));

logger.info("--> checking single filtering alias search with global facets and sort");
searchResponse = client().prepareSearch("tests")
.setQuery(QueryBuilders.matchQuery("name", "bar"))
searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("test").field("name")))
.addSort("_index", SortOrder.ASC)
.get();

@@ -305,8 +303,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertThat(terms.getBuckets().size(), equalTo(4));

logger.info("--> checking single filtering alias search with non-global facets");
searchResponse = client().prepareSearch("tests")
.setQuery(QueryBuilders.matchQuery("name", "bar"))
searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar"))
.addAggregation(AggregationBuilders.terms("test").field("name"))
.addSort("_index", SortOrder.ASC)
.get();

@@ -318,7 +315,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
assertHits(searchResponse.getHits(), "1", "2");

logger.info("--> checking single non-filtering alias search");
searchResponse = client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3", "4");

logger.info("--> checking non-filtering alias and filtering alias search");

@@ -376,18 +373,18 @@ public class IndexAliasesIT extends ESIntegTestCase {
refresh();

logger.info("--> checking filtering alias for two indices");
SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
SearchResponse searchResponse = prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "5");
assertThat(
client().prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
equalTo(2L)
);

logger.info("--> checking filtering alias for one index");
searchResponse = client().prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "2");
assertThat(
client().prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
equalTo(1L)
);

@@ -614,7 +611,7 @@ public class IndexAliasesIT extends ESIntegTestCase {

logger.info("--> checking counts before delete");
assertThat(
client().prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value,
equalTo(1L)
);
}

@@ -1141,8 +1138,8 @@ public class IndexAliasesIT extends ESIntegTestCase {
client().prepareIndex("my-index").setSource("timestamp", "2016-12-12").get();
if (i % 2 == 0) {
refresh();
assertHitCount(client().prepareSearch("filter1"), i);
assertHitCount(client().prepareSearch("filter2"), i);
assertHitCount(prepareSearch("filter1"), i);
assertHitCount(prepareSearch("filter2"), i);
}
}
}

@@ -1239,7 +1236,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
"test_2",
() -> assertAcked(indicesAdmin().prepareAliases().addAlias("test_2", "test").removeIndex("test"))
);
assertHitCount(client().prepareSearch("test"), 1);
assertHitCount(prepareSearch("test"), 1);
}

public void testHiddenAliasesMustBeConsistent() {

@@ -1331,22 +1328,21 @@ public class IndexAliasesIT extends ESIntegTestCase {
refresh(writeIndex, nonWriteIndex);

// Make sure that the doc written to the alias made it
SearchResponse searchResponse = client().prepareSearch(writeIndex).setQuery(QueryBuilders.matchAllQuery()).get();
SearchResponse searchResponse = prepareSearch(writeIndex).setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "2", "3");

// Ensure that all docs can be gotten through the alias
searchResponse = client().prepareSearch(alias).setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch(alias).setQuery(QueryBuilders.matchAllQuery()).get();
assertHits(searchResponse.getHits(), "1", "2", "3");

// And querying using a wildcard with indices options set to expand hidden
searchResponse = client().prepareSearch("alias*")
.setQuery(QueryBuilders.matchAllQuery())
searchResponse = prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery())
.setIndicesOptions(IndicesOptions.fromOptions(false, false, true, false, true, true, true, false, false))
.get();
assertHits(searchResponse.getHits(), "1", "2", "3");

// And that querying the alias with a wildcard and no expand options fails
searchResponse = client().prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()).get();
assertThat(searchResponse.getHits().getHits(), emptyArray());
}
@@ -265,7 +265,7 @@ public class SimpleBlocksIT extends ESIntegTestCase {
}

indicesAdmin().prepareRefresh(indexName).get();
assertHitCount(client().prepareSearch(indexName).setSize(0), nbDocs);
assertHitCount(prepareSearch(indexName).setSize(0), nbDocs);
}

public void testSameBlockTwice() throws Exception {

@@ -390,7 +390,7 @@ public class SimpleBlocksIT extends ESIntegTestCase {
disableIndexBlock(indexName, block);
}
refresh(indexName);
assertHitCount(client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs);
assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs);
}

public void testAddBlockWhileDeletingIndices() throws Exception {
@@ -42,7 +42,7 @@ public class BroadcastActionsIT extends ESIntegTestCase {
// check count
for (int i = 0; i < 5; i++) {
// test successful
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get();
SearchResponse countResponse = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
@@ -213,7 +213,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
long dataClusterStateVersion = internalCluster().clusterService(dataNode).state().version();
assertThat(masterClusterStateVersion, equalTo(dataClusterStateVersion));
});
assertHitCount(client().prepareSearch("test"), 0);
assertHitCount(prepareSearch("test"), 0);
}

public void testDelayedMappingPropagationOnPrimary() throws Exception {
@@ -498,7 +498,7 @@ public class ShardRoutingRoleIT extends ESIntegTestCase {
}
// Regular search
for (int i = 0; i < 10; i++) {
final var search = client().prepareSearch(INDEX_NAME).setProfile(true);
final var search = prepareSearch(INDEX_NAME).setProfile(true);
switch (randomIntBetween(0, 2)) {
case 0 -> search.setRouting(randomAlphaOfLength(10));
case 1 -> search.setPreference(randomSearchPreference(routingTableWatcher.numShards, internalCluster().getNodeNames()));
@@ -195,7 +195,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
}

client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get();
assertSearchHits(client().prepareSearch("test"), "1");
assertSearchHits(prepareSearch("test"), "1");

// Move all nodes above the low watermark so no shard movement can occur, and at least one node above the flood stage watermark so
// the index is blocked

@@ -221,7 +221,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"),
IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK
);
assertSearchHits(client().prepareSearch("test"), "1");
assertSearchHits(prepareSearch("test"), "1");

logger.info("--> index is confirmed read-only, releasing disk space");

@@ -240,7 +240,7 @@ public class MockDiskUsagesIT extends ESIntegTestCase {
throw new AssertionError("retrying", e);
}
});
assertSearchHits(client().prepareSearch("test"), "1", "3");
assertSearchHits(prepareSearch("test"), "1", "3");
}

public void testOnlyMovesEnoughShardsToDropBelowHighWatermark() throws Exception {
@@ -154,14 +154,14 @@ public class DocumentActionsIT extends ESIntegTestCase {
// check count
for (int i = 0; i < 5; i++) {
// test successful
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet();
SearchResponse countResponse = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet();
assertNoFailures(countResponse);
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
assertThat(countResponse.getFailedShards(), equalTo(0));

// count with no query is a match all one
countResponse = client().prepareSearch("test").setSize(0).execute().actionGet();
countResponse = prepareSearch("test").setSize(0).execute().actionGet();
assertThat(
"Failures " + countResponse.getShardFailures(),
countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length,
@@ -139,7 +139,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertEquals(RestStatus.CREATED, indexResponse.status());
SearchResponse target = client().prepareSearch("target").get();
SearchResponse target = prepareSearch("target").get();
assertEquals(1, target.getHits().getTotalHits().value);
assertFalse(target.getHits().getAt(0).getSourceAsMap().containsKey("final"));
}

@@ -165,7 +165,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertEquals(RestStatus.CREATED, indexResponse.status());
SearchResponse target = client().prepareSearch("target").get();
SearchResponse target = prepareSearch("target").get();
assertEquals(1, target.getHits().getTotalHits().value);
assertEquals(true, target.getHits().getAt(0).getSourceAsMap().get("final"));
}

@@ -191,7 +191,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertEquals(RestStatus.CREATED, indexResponse.status());
SearchResponse target = client().prepareSearch("target").get();
SearchResponse target = prepareSearch("target").get();
assertEquals(1, target.getHits().getTotalHits().value);
assertFalse(target.getHits().getAt(0).getSourceAsMap().containsKey("final"));
}

@@ -217,7 +217,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.get();
assertEquals(RestStatus.CREATED, indexResponse.status());
SearchResponse target = client().prepareSearch("target").get();
SearchResponse target = prepareSearch("target").get();
assertEquals(1, target.getHits().getTotalHits().value);
assertTrue(target.getHits().getAt(0).getSourceAsMap().containsKey("final"));
}
@@ -40,21 +40,19 @@ public class HiddenIndexIT extends ESIntegTestCase {
client().prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();

// default not visible to wildcard expansion
SearchResponse searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index"))
.setSize(1000)
SearchResponse searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000)
.setQuery(QueryBuilders.matchAllQuery())
.get();
boolean matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertFalse(matchedHidden);

// direct access allowed
searchResponse = client().prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);

// with indices option to include hidden
searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index"))
.setSize(1000)
searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000)
.setQuery(QueryBuilders.matchAllQuery())
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN)
.get();

@@ -64,16 +62,13 @@ public class HiddenIndexIT extends ESIntegTestCase {
// implicit based on use of pattern starting with . and a wildcard
assertAcked(indicesAdmin().prepareCreate(".hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get());
client().prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
searchResponse = client().prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
searchResponse = prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);

// make index not hidden
updateIndexSettings(Settings.builder().put("index.hidden", false), "hidden-index");
searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index"))
.setSize(1000)
.setQuery(QueryBuilders.matchAllQuery())
.get();
searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get();
matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex()));
assertTrue(matchedHidden);
}
@@ -37,7 +37,7 @@ public class IndexRequestBuilderIT extends ESIntegTestCase {
client().prepareIndex("test").setSource(map) };
indexRandom(true, builders);
ElasticsearchAssertions.assertHitCount(
client().prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")),
prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")),
builders.length
);
}
@@ -63,25 +63,25 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
.get();
assertEquals(RestStatus.CREATED, index.status());
assertFalse("request shouldn't have forced a refresh", index.forcedRefresh());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
}

public void testDelete() throws InterruptedException, ExecutionException {
// Index normally
indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar"));
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");

// Now delete with blockUntilRefresh
DeleteResponse delete = client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get();
assertEquals(DocWriteResponse.Result.DELETED, delete.getResult());
assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh());
assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")));
assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")));
}

public void testUpdate() throws InterruptedException, ExecutionException {
// Index normally
indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar"));
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");

// Update with RefreshPolicy.WAIT_UNTIL
UpdateResponse update = client().prepareUpdate("test", "1")

@@ -90,7 +90,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
.get();
assertEquals(2, update.getVersion());
assertFalse("request shouldn't have forced a refresh", update.forcedRefresh());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1");

// Upsert with RefreshPolicy.WAIT_UNTIL
update = client().prepareUpdate("test", "2")

@@ -100,7 +100,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
.get();
assertEquals(1, update.getVersion());
assertFalse("request shouldn't have forced a refresh", update.forcedRefresh());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")), "2");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "cat")), "2");

// Update-becomes-delete with RefreshPolicy.WAIT_UNTIL
update = client().prepareUpdate("test", "2")

@@ -109,7 +109,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
.get();
assertEquals(2, update.getVersion());
assertFalse("request shouldn't have forced a refresh", update.forcedRefresh());
assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")));
assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "cat")));
}

public void testBulk() {

@@ -117,19 +117,19 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
bulk.add(client().prepareIndex("test").setId("1").setSource("foo", "bar"));
assertBulkSuccess(bulk.get());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");

// Update by bulk with RefreshPolicy.WAIT_UNTIL
bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
bulk.add(client().prepareUpdate("test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz"));
assertBulkSuccess(bulk.get());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1");

// Delete by bulk with RefreshPolicy.WAIT_UNTIL
bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);
bulk.add(client().prepareDelete("test", "1"));
assertBulkSuccess(bulk.get());
assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")));
assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")));

// Update makes a noop
bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL);

@@ -153,7 +153,7 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
}
assertEquals(RestStatus.CREATED, index.get().status());
assertFalse("request shouldn't have forced a refresh", index.get().forcedRefresh());
assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1");
}

private void assertBulkSuccess(BulkResponse response) {
@@ -108,8 +108,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
);
assertThat(deleteError.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]"));
indicesAdmin().prepareRefresh("test").get();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(new MatchAllQueryBuilder())
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder())
.setTrackTotalHitsUpTo(Integer.MAX_VALUE)
.setSize(0)
.get();

@@ -121,8 +120,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
internalCluster().fullRestart();
internalCluster().ensureAtLeastNumDataNodes(2);
ensureGreen("test");
searchResponse = client().prepareSearch("test")
.setQuery(new MatchAllQueryBuilder())
searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder())
.setTrackTotalHitsUpTo(Integer.MAX_VALUE)
.setSize(0)
.get();

@@ -137,8 +135,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
assertThat(indexingResult.numFailures, greaterThan(0));
assertThat(indexingResult.numSuccess, both(greaterThan(0)).and(lessThanOrEqualTo(maxDocs.get())));
indicesAdmin().prepareRefresh("test").get();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(new MatchAllQueryBuilder())
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder())
.setTrackTotalHitsUpTo(Integer.MAX_VALUE)
.setSize(0)
.get();

@@ -155,8 +152,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
assertThat(indexingResult.numSuccess, equalTo(0));
}
indicesAdmin().prepareRefresh("test").get();
searchResponse = client().prepareSearch("test")
.setQuery(new MatchAllQueryBuilder())
searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder())
.setTrackTotalHitsUpTo(Integer.MAX_VALUE)
.setSize(0)
.get();
@@ -37,8 +37,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {

SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());

SearchResponse response = client().prepareSearch("test-idx")
.setQuery(QueryBuilders.termQuery("even", true))
SearchResponse response = prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("even", true))
.addAggregation(AggregationBuilders.terms("test").field("test_field").size(recordCount * 2).collectMode(aggCollectionMode))
.addAggregation(
AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode)

@@ -67,7 +66,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
assertAcked(indicesAdmin().prepareCreate("test-idx").setMapping(mapping));
client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get();
indicesAdmin().prepareRefresh("test-idx").execute().actionGet();
SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get();
SearchResponse response = prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get();
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
}
@@ -372,15 +372,17 @@ public class DynamicMappingIT extends ESIntegTestCase {
assertFalse(bulkResponse.hasFailures());

assertSearchHits(
client().prepareSearch("test")
.setQuery(new GeoBoundingBoxQueryBuilder("location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))),
prepareSearch("test").setQuery(
new GeoBoundingBoxQueryBuilder("location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))
),
"1",
"2",
"4"
);
assertSearchHits(
client().prepareSearch("test")
.setQuery(new GeoBoundingBoxQueryBuilder("address.location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))),
prepareSearch("test").setQuery(
new GeoBoundingBoxQueryBuilder("address.location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))
),
"3"
);
}

@@ -460,15 +462,15 @@ public class DynamicMappingIT extends ESIntegTestCase {
assertFalse(bulkItemResponses.buildFailureMessage(), bulkItemResponses.hasFailures());

{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one", "one")).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("one", "one")).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one.two", 3.5)).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("one.two", 3.5)).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one.two.three", "1")).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("one.two.three", "1")).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
}

@@ -508,21 +510,19 @@ public class DynamicMappingIT extends ESIntegTestCase {
assertFalse(bulkItemResponses.buildFailureMessage(), bulkItemResponses.hasFailures());

{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("obj.one", 1)).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("obj.one", 1)).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("anything", "anything")).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("anything", "anything")).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one", "one")).get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one", "one")).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
{
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(new MatchQueryBuilder("obj.runtime.one.two", "1"))
.get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one.two", "1")).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}

@@ -569,9 +569,7 @@ public class DynamicMappingIT extends ESIntegTestCase {
);

{
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(new MatchQueryBuilder("obj.runtime.dynamic.number", 1))
.get();
SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.dynamic.number", 1)).get();
assertEquals(1, searchResponse.getHits().getTotalHits().value);
}
@@ -47,9 +47,9 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {

client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get();

SearchResponse searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title", "multi")).get();
SearchResponse searchResponse = prepareSearch("my-index").setQuery(matchQuery("title", "multi")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")).get();
searchResponse = prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));

assertAcked(indicesAdmin().preparePutMapping("my-index").setSource(createPutMappingSource()));

@@ -68,7 +68,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {

client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get();

searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")).get();
searchResponse = prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")).get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
}

@@ -92,12 +92,11 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {

GeoPoint point = new GeoPoint(51, 19);
client().prepareIndex("my-index").setId("1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get();
SearchResponse countResponse = client().prepareSearch("my-index")
.setSize(0)
SearchResponse countResponse = prepareSearch("my-index").setSize(0)
.setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS)))
.get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get();
countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}

@@ -119,7 +118,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(bField.get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index").setId("1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get();
SearchResponse countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}

@@ -141,7 +140,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(bField.get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index").setId("1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get();
SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get();
SearchResponse countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get();
assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}
@@ -42,10 +42,10 @@ public class CustomQueryParserIT extends ESIntegTestCase {
}

public void testCustomDummyQuery() {
assertHitCount(client().prepareSearch("index").setQuery(new DummyQueryBuilder()), 1L);
assertHitCount(prepareSearch("index").setQuery(new DummyQueryBuilder()), 1L);
}

public void testCustomDummyQueryWithinBooleanQuery() {
assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryBuilder())), 1L);
assertHitCount(prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryBuilder())), 1L);
}
}
@@ -47,10 +47,10 @@ public class MatchPhraseQueryIT extends ESIntegTestCase {
MatchPhraseQueryBuilder baseQuery = matchPhraseQuery("name", "the who").analyzer("standard_stopwords");

MatchPhraseQueryBuilder matchNoneQuery = baseQuery.zeroTermsQuery(ZeroTermsQueryOption.NONE);
assertHitCount(client().prepareSearch(INDEX).setQuery(matchNoneQuery), 0L);
assertHitCount(prepareSearch(INDEX).setQuery(matchNoneQuery), 0L);

MatchPhraseQueryBuilder matchAllQuery = baseQuery.zeroTermsQuery(ZeroTermsQueryOption.ALL);
assertHitCount(client().prepareSearch(INDEX).setQuery(matchAllQuery), 2L);
assertHitCount(prepareSearch(INDEX).setQuery(matchAllQuery), 2L);
}

private List<IndexRequestBuilder> getIndexRequests() {
@@ -255,7 +255,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase {

ensureGreen(indexName);

assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), expectedNumDocs);
assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), expectedNumDocs);
}

public void testCorruptTranslogTruncation() throws Exception {

@@ -424,13 +424,13 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase {
ensureYellow(indexName);

// Run a search and make sure it succeeds
assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), numDocsToKeep);
assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), numDocsToKeep);

logger.info("--> starting the replica node to test recovery");
internalCluster().startNode(node2PathSettings);
ensureGreen(indexName);
for (String node : internalCluster().nodesInclude(indexName)) {
SearchRequestBuilder q = client().prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery());
SearchRequestBuilder q = prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery());
assertHitCount(q, numDocsToKeep);
}
final RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(indexName).setActiveOnly(false).get();

@@ -513,7 +513,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase {
ensureYellow();

// Run a search and make sure it succeeds
assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), totalDocs);
assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), totalDocs);

// check replica corruption
final RemoveCorruptedShardDataCommand command = new RemoveCorruptedShardDataCommand();

@@ -534,7 +534,7 @@ public class RemoveCorruptedShardDataCommandIT extends ESIntegTestCase {
internalCluster().startNode(node2PathSettings);
ensureGreen(indexName);
for (String node : internalCluster().nodesInclude(indexName)) {
assertHitCount(client().prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()), totalDocs);
assertHitCount(prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()), totalDocs);
}

final RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(indexName).setActiveOnly(false).get();
@@ -97,8 +97,7 @@ public class CorruptedTranslogIT extends ESIntegTestCase {
});

assertThat(
expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test").setQuery(matchAllQuery()).get())
.getMessage(),
expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch("test").setQuery(matchAllQuery()).get()).getMessage(),
containsString("all shards failed")
);
@@ -107,7 +107,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
}

refresh();
SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get();
SearchResponse searchResponse = prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get();

Set<String> uniqueIds = new HashSet<>();
long dupCounter = 0;

@@ -115,10 +115,9 @@ public class ExceptionRetryIT extends ESIntegTestCase {
for (int i = 0; i < searchResponse.getHits().getHits().length; i++) {
if (uniqueIds.add(searchResponse.getHits().getHits()[i].getId()) == false) {
if (found_duplicate_already == false) {
SearchResponse dupIdResponse = client().prepareSearch("index")
.setQuery(termQuery("_id", searchResponse.getHits().getHits()[i].getId()))
.setExplain(true)
.get();
SearchResponse dupIdResponse = prepareSearch("index").setQuery(
termQuery("_id", searchResponse.getHits().getHits()[i].getId())
).setExplain(true).get();
assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L));
logger.info("found a duplicate id:");
for (SearchHit hit : dupIdResponse.getHits()) {
@@ -55,7 +55,7 @@ public class IndexActionIT extends ESIntegTestCase {
for (int j = 0; j < numOfChecks; j++) {
try {
logger.debug("running search with all types");
SearchResponse response = client().prepareSearch("test").get();
SearchResponse response = prepareSearch("test").get();
if (response.getHits().getTotalHits().value != numOfDocs) {
final String message = "Count is "
+ response.getHits().getTotalHits().value

@@ -74,7 +74,7 @@ public class IndexActionIT extends ESIntegTestCase {
}
try {
logger.debug("running search with a specific type");
SearchResponse response = client().prepareSearch("test").get();
SearchResponse response = prepareSearch("test").get();
if (response.getHits().getTotalHits().value != numOfDocs) {
final String message = "Count is "
+ response.getHits().getTotalHits().value
@@ -395,7 +395,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
public void testAllMissingLenient() throws Exception {
createIndex("test1");
client().prepareIndex("test1").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get();
assertHitCount(client().prepareSearch("test2").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 0L);
assertHitCount(prepareSearch("test2").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 0L);
assertHitCount(
client().prepareSearch("test2", "test3").setQuery(matchAllQuery()).setIndicesOptions(IndicesOptions.lenientExpandOpen()),
0L

@@ -406,7 +406,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {

public void testAllMissingStrict() throws Exception {
createIndex("test1");
expectThrows(IndexNotFoundException.class, () -> client().prepareSearch("test2").setQuery(matchAllQuery()).execute().actionGet());
expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2").setQuery(matchAllQuery()).execute().actionGet());

expectThrows(
IndexNotFoundException.class,

@@ -606,7 +606,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
}

static SearchRequestBuilder search(String... indices) {
return client().prepareSearch(indices).setQuery(matchAllQuery());
return prepareSearch(indices).setQuery(matchAllQuery());
}

static MultiSearchRequestBuilder msearch(IndicesOptions options, String... indices) {

@@ -614,7 +614,7 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
if (options != null) {
multiSearchRequestBuilder.setIndicesOptions(options);
}
return multiSearchRequestBuilder.add(client().prepareSearch(indices).setQuery(matchAllQuery()));
return multiSearchRequestBuilder.add(prepareSearch(indices).setQuery(matchAllQuery()));
}

static ClearIndicesCacheRequestBuilder clearCache(String... indices) {
@@ -77,8 +77,8 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase {
latch.await();
assertThat(throwable, emptyIterable());
refresh();
assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test-user")), numDocs);
assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test user")), 0);
assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test-user")), numDocs);
assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test user")), 0);

}
}
@@ -60,7 +60,7 @@ public class MalformedDynamicTemplateIT extends ESIntegTestCase {
);
client().prepareIndex(indexName).setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get();
assertNoFailures((indicesAdmin().prepareRefresh(indexName)).get());
assertHitCount(client().prepareSearch(indexName), 1);
assertHitCount(prepareSearch(indexName), 1);

MapperParsingException ex = expectThrows(
MapperParsingException.class,
@@ -86,7 +86,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase {
logger.info("checking all the documents are there");
RefreshResponse refreshResponse = indicesAdmin().prepareRefresh().execute().actionGet();
assertThat(refreshResponse.getFailedShards(), equalTo(0));
SearchResponse response = client().prepareSearch("test").setSize(0).execute().actionGet();
SearchResponse response = prepareSearch("test").setSize(0).execute().actionGet();
assertThat(response.getHits().getTotalHits().value, equalTo((long) recCount));

logger.info("checking all the fields are in the mappings");
@@ -105,10 +105,9 @@ public class IndexPrimaryRelocationIT extends ESIntegTestCase {
finished.set(true);
indexingThread.join();
refresh("test");
ElasticsearchAssertions.assertHitCount(client().prepareSearch("test").setTrackTotalHits(true), numAutoGenDocs.get());
ElasticsearchAssertions.assertHitCount(prepareSearch("test").setTrackTotalHits(true), numAutoGenDocs.get());
ElasticsearchAssertions.assertHitCount(
client().prepareSearch("test")
.setTrackTotalHits(true)// extra paranoia ;)
prepareSearch("test").setTrackTotalHits(true)// extra paranoia ;)
.setQuery(QueryBuilders.termQuery("auto", true)),
numAutoGenDocs.get()
);
@@ -394,7 +394,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase {
}

refresh(INDEX_NAME);
assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numOfDocs);
assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numOfDocs);

final boolean closedIndex = randomBoolean();
if (closedIndex) {

@@ -439,7 +439,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase {
if (closedIndex) {
assertAcked(indicesAdmin().prepareOpen(INDEX_NAME));
}
assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numOfDocs);
assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numOfDocs);
}

public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exception {

@@ -925,7 +925,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase {

indexRandom(true, docs);
flush();
assertThat(client().prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs));
assertThat(prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs));
return indicesAdmin().prepareStats(name).execute().actionGet();
}
@@ -43,7 +43,7 @@ public class ReplicaToPrimaryPromotionIT extends ESIntegTestCase {
refresh(indexName);
}

assertHitCount(client().prepareSearch(indexName).setSize(0), numOfDocs);
assertHitCount(prepareSearch(indexName).setSize(0), numOfDocs);
ensureGreen(indexName);

// sometimes test with a closed index

@@ -76,6 +76,6 @@ public class ReplicaToPrimaryPromotionIT extends ESIntegTestCase {
assertAcked(indicesAdmin().prepareOpen(indexName));
ensureYellowAndNoInitializingShards(indexName);
}
assertHitCount(client().prepareSearch(indexName).setSize(0), numOfDocs);
assertHitCount(prepareSearch(indexName).setSize(0), numOfDocs);
}
}
@@ -132,7 +132,7 @@ public class CloseIndexIT extends ESIntegTestCase {
assertIndexIsClosed(indexName);

assertAcked(indicesAdmin().prepareOpen(indexName));
assertHitCount(client().prepareSearch(indexName).setSize(0), nbDocs);
assertHitCount(prepareSearch(indexName).setSize(0), nbDocs);
}

public void testCloseAlreadyClosedIndex() throws Exception {

@@ -244,7 +244,7 @@ public class CloseIndexIT extends ESIntegTestCase {

assertIndexIsClosed(indexName);
assertAcked(indicesAdmin().prepareOpen(indexName));
assertHitCount(client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs);
assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs);
}

public void testCloseWhileDeletingIndices() throws Exception {

@@ -348,10 +348,7 @@ public class CloseIndexIT extends ESIntegTestCase {
}
refresh(indexName);
assertIndexIsOpened(indexName);
assertHitCount(
client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE),
indexer.totalIndexedDocs()
);
assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), indexer.totalIndexedDocs());
}

public void testCloseIndexWaitForActiveShards() throws Exception {
@@ -246,7 +246,7 @@ public class CloseWhileRelocatingShardsIT extends ESIntegTestCase {
ensureGreen(indices);

for (String index : acknowledgedCloses) {
long docsCount = client().prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value;
long docsCount = prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value;
assertEquals(
"Expected "
+ docsPerIndex.get(index)

@@ -374,7 +374,7 @@ public class IndexStatsIT extends ESIntegTestCase {
assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L));
for (int i = 0; i < 10; i++) {
assertThat(
client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
equalTo((long) numDocs)
);
assertThat(

@@ -409,7 +409,7 @@ public class IndexStatsIT extends ESIntegTestCase {
for (int i = 0; i < 10; i++) {
assertThat(
client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
equalTo((long) numDocs)
);
assertThat(

@@ -427,8 +427,7 @@ public class IndexStatsIT extends ESIntegTestCase {
// test explicit request parameter
assertThat(
client().prepareSearch("idx")
.setSearchType(SearchType.QUERY_THEN_FETCH)
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH)
.setSize(0)
.setRequestCache(false)
.get()

@@ -442,8 +441,7 @@ public class IndexStatsIT extends ESIntegTestCase {
);
assertThat(
client().prepareSearch("idx")
.setSearchType(SearchType.QUERY_THEN_FETCH)
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH)
.setSize(0)
.setRequestCache(true)
.get()

@@ -462,7 +460,7 @@ public class IndexStatsIT extends ESIntegTestCase {
updateIndexSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false), "idx");
assertThat(
client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value,
equalTo((long) numDocs)
);
assertThat(

@@ -471,8 +469,7 @@ public class IndexStatsIT extends ESIntegTestCase {
);
assertThat(
client().prepareSearch("idx")
.setSearchType(SearchType.QUERY_THEN_FETCH)
prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH)
.setSize(0)
.setRequestCache(true)
.get()

@@ -1013,7 +1010,7 @@ public class IndexStatsIT extends ESIntegTestCase {
client().prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").execute().actionGet();
refresh();
client().prepareSearch("_all").setStats("bar", "baz").execute().actionGet();
prepareSearch("_all").setStats("bar", "baz").execute().actionGet();
IndicesStatsRequestBuilder builder = indicesAdmin().prepareStats();
IndicesStatsResponse stats = builder.execute().actionGet();

@@ -1163,9 +1160,7 @@ public class IndexStatsIT extends ESIntegTestCase {
// the query cache has an optimization that disables it automatically if there is contention,
// so we run it in an assertBusy block which should eventually succeed
assertBusy(() -> {
assertNoFailures(
client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))
);
assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))));
IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get();
assertCumulativeQueryCacheStats(stats);
assertThat(stats.getTotal().queryCache.getHitCount(), equalTo(0L));

@@ -1174,9 +1169,7 @@ public class IndexStatsIT extends ESIntegTestCase {
});
assertBusy(() -> {
assertNoFailures(
client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))
);
assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))));
IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get();
assertCumulativeQueryCacheStats(stats);
assertThat(stats.getTotal().queryCache.getHitCount(), greaterThan(0L));

@@ -1224,9 +1217,7 @@ public class IndexStatsIT extends ESIntegTestCase {
);
assertBusy(() -> {
assertNoFailures(
client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))
);
assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))));
IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get();
assertCumulativeQueryCacheStats(stats);
assertThat(stats.getTotal().queryCache.getHitCount(), greaterThan(0L));

@@ -152,8 +152,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
client().prepareIndex("test_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get();
ensureGreen();
SearchResponse searchResponse = client().prepareSearch("test_index")
.setQuery(termQuery("field1", "value1"))
SearchResponse searchResponse = prepareSearch("test_index").setQuery(termQuery("field1", "value1"))
.addStoredField("field1")
.addStoredField("field2")
.execute()

@@ -168,8 +167,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
ensureGreen();
// now only match on one template (template_1)
searchResponse = client().prepareSearch("text_index")
.setQuery(termQuery("field1", "value1"))
searchResponse = prepareSearch("text_index").setQuery(termQuery("field1", "value1"))
.addStoredField("field1")
.addStoredField("field2")
.execute()

@@ -512,16 +510,16 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
refresh();
assertHitCount(client().prepareSearch("test_index"), 5L);
assertHitCount(client().prepareSearch("simple_alias"), 5L);
assertHitCount(client().prepareSearch("templated_alias-test_index"), 5L);
assertHitCount(prepareSearch("test_index"), 5L);
assertHitCount(prepareSearch("simple_alias"), 5L);
assertHitCount(prepareSearch("templated_alias-test_index"), 5L);
SearchResponse searchResponse = client().prepareSearch("filtered_alias").get();
SearchResponse searchResponse = prepareSearch("filtered_alias").get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("type"), equalTo("type2"));
// Search the complex filter alias
searchResponse = client().prepareSearch("complex_filtered_alias").get();
searchResponse = prepareSearch("complex_filtered_alias").get();
assertHitCount(searchResponse, 3L);
Set<String> types = new HashSet<>();

@@ -558,9 +556,9 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
client().prepareIndex("test_index").setId("2").setSource("field", "value2").get();
refresh();
assertHitCount(client().prepareSearch("test_index"), 2L);
assertHitCount(prepareSearch("test_index"), 2L);
SearchResponse searchResponse = client().prepareSearch("my_alias").get();
SearchResponse searchResponse = prepareSearch("my_alias").get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value2"));
}

@@ -593,10 +591,10 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
client().prepareIndex("test_index").setId("2").setSource("field", "value2").get();
refresh();
assertHitCount(client().prepareSearch("test_index"), 2L);
assertHitCount(client().prepareSearch("alias1"), 2L);
assertHitCount(prepareSearch("test_index"), 2L);
assertHitCount(prepareSearch("alias1"), 2L);
SearchResponse searchResponse = client().prepareSearch("alias2").get();
SearchResponse searchResponse = prepareSearch("alias2").get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value2"));
}

@@ -852,8 +850,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
ensureGreen();
// ax -> matches template
SearchResponse searchResponse = client().prepareSearch("ax")
.setQuery(termQuery("field1", "value1"))
SearchResponse searchResponse = prepareSearch("ax").setQuery(termQuery("field1", "value1"))
.addStoredField("field1")
.addStoredField("field2")
.execute()

@@ -864,8 +861,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
assertNull(searchResponse.getHits().getAt(0).field("field2"));
// bx -> matches template
searchResponse = client().prepareSearch("bx")
.setQuery(termQuery("field1", "value1"))
searchResponse = prepareSearch("bx").setQuery(termQuery("field1", "value1"))
.addStoredField("field1")
.addStoredField("field2")
.execute()

@@ -133,7 +133,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying count");
indicesAdmin().prepareRefresh().execute().actionGet();
assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
logger.info("--> start another node");
final String node_2 = internalCluster().startNode();

@@ -157,7 +157,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying count again...");
indicesAdmin().prepareRefresh().execute().actionGet();
assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
}
public void testRelocationWhileIndexingRandom() throws Exception {

@@ -236,8 +236,7 @@ public class RelocationIT extends ESIntegTestCase {
boolean ranOnce = false;
for (int i = 0; i < 10; i++) {
logger.info("--> START search test round {}", i + 1);
SearchHits hits = client().prepareSearch("test")
.setQuery(matchAllQuery())
SearchHits hits = prepareSearch("test").setQuery(matchAllQuery())
.setSize((int) indexer.totalIndexedDocs())
.storedFields()
.execute()

@@ -486,7 +485,7 @@ public class RelocationIT extends ESIntegTestCase {
for (int i = 0; i < searchThreads.length; i++) {
searchThreads[i] = new Thread(() -> {
while (stopped.get() == false) {
assertNoFailures(client().prepareSearch("test").setRequestCache(false));
assertNoFailures(prepareSearch("test").setRequestCache(false));
}
});
searchThreads[i].start();

@@ -501,7 +500,7 @@ public class RelocationIT extends ESIntegTestCase {
docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i));
}
indexRandom(true, docs);
assertHitCount(client().prepareSearch("test"), numDocs);
assertHitCount(prepareSearch("test"), numDocs);
logger.info(" --> moving index to new nodes");
updateIndexSettings(

@@ -580,7 +579,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> verifying count");
indicesAdmin().prepareRefresh().execute().actionGet();
assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L));
}
public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception {

@@ -650,7 +649,7 @@ public class RelocationIT extends ESIntegTestCase {
assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone));
}, 1, TimeUnit.MINUTES);
assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L));
assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L));
}
public void testRelocationEstablishedPeerRecoveryRetentionLeases() throws Exception {

@@ -152,18 +152,12 @@ public class AliasRoutingIT extends ESIntegTestCase {
);
assertThat(
client().prepareSearch("alias1")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(0L)
);
assertThat(
client().prepareSearch("alias1")
.setSize(0)
prepareSearch("alias1").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -198,17 +192,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
equalTo(1L)
);
assertThat(
client().prepareSearch("alias0")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(1L)
);
assertThat(
client().prepareSearch("alias0")
.setSize(0)
prepareSearch("alias0").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -263,17 +251,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
equalTo(1L)
);
assertThat(
client().prepareSearch("alias0")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(1L)
);
assertThat(
client().prepareSearch("alias0")
.setSize(0)
prepareSearch("alias0").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -307,17 +289,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
equalTo(1L)
);
assertThat(
client().prepareSearch("alias1")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(1L)
);
assertThat(
client().prepareSearch("alias1")
.setSize(0)
prepareSearch("alias1").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -351,17 +327,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
equalTo(2L)
);
assertThat(
client().prepareSearch("alias01")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(2L)
);
assertThat(
client().prepareSearch("alias01")
.setSize(0)
prepareSearch("alias01").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -511,17 +481,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search with alias-ab, should find two");
for (int i = 0; i < 5; i++) {
assertThat(
client().prepareSearch("alias-ab")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(2L)
);
assertThat(
client().prepareSearch("alias-ab")
.setSize(0)
prepareSearch("alias-ab").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -574,12 +538,7 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> search all on index_* should find two");
for (int i = 0; i < 5; i++) {
assertThat(
client().prepareSearch("index_*")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(2L)
);
}

@@ -602,8 +561,7 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> indexing on index_2 which is a concrete index");
client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
SearchResponse searchResponse = client().prepareSearch("index_*")
.setSearchType(SearchType.QUERY_THEN_FETCH)
SearchResponse searchResponse = prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH)
.setSize(1)
.setQuery(QueryBuilders.matchAllQuery())
.execute()

@@ -630,17 +588,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
for (int i = 0; i < 5; i++) {
assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
assertThat(
client().prepareSearch("alias")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(1L)
);
assertThat(
client().prepareSearch("alias")
.setSize(0)
prepareSearch("alias").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -656,17 +608,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
logger.info("--> verifying search with wrong routing should not find");
for (int i = 0; i < 5; i++) {
assertThat(
client().prepareSearch("alias")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(0L)
);
assertThat(
client().prepareSearch("alias")
.setSize(0)
prepareSearch("alias").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -691,17 +637,11 @@ public class AliasRoutingIT extends ESIntegTestCase {
assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
assertThat(client().prepareGet("test", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true));
assertThat(
client().prepareSearch("alias")
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()
.getHits()
.getTotalHits().value,
prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
equalTo(2L)
);
assertThat(
client().prepareSearch("alias")
.setSize(0)
prepareSearch("alias").setSize(0)
.setQuery(QueryBuilders.matchAllQuery())
.execute()
.actionGet()

@@ -65,9 +65,9 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
indexTestData();
logger.info("Executing search");
ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())))
.execute();
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(
scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))
).execute();
awaitForBlock(plugins);
cancelSearch(SearchAction.NAME);

@@ -82,9 +82,10 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
indexTestData();
logger.info("Executing search");
ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.addScriptField("test_field", new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))
.execute();
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").addScriptField(
"test_field",
new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())
).execute();
awaitForBlock(plugins);
cancelSearch(SearchAction.NAME);

@@ -110,8 +111,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
termsAggregationBuilder.field("field.keyword");
}
ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.setQuery(matchAllQuery())
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(matchAllQuery())
.addAggregation(
termsAggregationBuilder.subAggregation(
new ScriptedMetricAggregationBuilder("sub_agg").initScript(

@@ -144,8 +144,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
indexTestData();
logger.info("Executing search");
ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
.setScroll(TimeValue.timeValueSeconds(10))
ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setScroll(TimeValue.timeValueSeconds(10))
.setSize(5)
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())))
.execute();

@@ -171,8 +170,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
logger.info("Executing search");
TimeValue keepAlive = TimeValue.timeValueSeconds(5);
SearchResponse searchResponse = client().prepareSearch("test")
.setScroll(keepAlive)
SearchResponse searchResponse = prepareSearch("test").setScroll(keepAlive)
.setSize(2)
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())))
.get();

@@ -209,11 +207,10 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
indexTestData();
ActionFuture<MultiSearchResponse> msearchResponse = client().prepareMultiSearch()
.add(
client().prepareSearch("test")
.addScriptField(
"test_field",
new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())
)
prepareSearch("test").addScriptField(
"test_field",
new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())
)
)
.execute();
awaitForBlock(plugins);

@@ -244,8 +241,7 @@ public class SearchCancellationIT extends AbstractSearchCancellationTestCase {
Thread searchThread = new Thread(() -> {
SearchPhaseExecutionException e = expectThrows(
SearchPhaseExecutionException.class,
() -> client().prepareSearch("test")
.setSearchType(SearchType.QUERY_THEN_FETCH)
() -> prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())))
.setAllowPartialSearchResults(false)
.setSize(1000)

@@ -73,7 +73,7 @@ public class SearchServiceCleanupOnLostMasterIT extends ESIntegTestCase {
index("test", "test", "{}");
assertThat(client().prepareSearch("test").setScroll("30m").get().getScrollId(), is(notNullValue()));
assertThat(prepareSearch("test").setScroll("30m").get().getScrollId(), is(notNullValue()));
loseMaster.accept(master, dataNode);
// in the past, this failed because the search context for the scroll would prevent the shard lock from being released.

@@ -55,8 +55,7 @@ public class SearchTimeoutIT extends ESIntegTestCase {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98369")
public void testTopHitsTimeout() {
indexDocs();
SearchResponse searchResponse = client().prepareSearch("test")
.setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
.get();
assertThat(searchResponse.isTimedOut(), equalTo(true));

@@ -71,8 +70,7 @@ public class SearchTimeoutIT extends ESIntegTestCase {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98053")
public void testAggsTimeout() {
indexDocs();
SearchResponse searchResponse = client().prepareSearch("test")
.setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
.setSize(0)
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
.addAggregation(new TermsAggregationBuilder("terms").field("field.keyword"))

@@ -96,8 +94,7 @@ public class SearchTimeoutIT extends ESIntegTestCase {
ElasticsearchException ex = expectThrows(
ElasticsearchException.class,
() -> client().prepareSearch("test")
.setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
() -> prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
.setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
.setAllowPartialSearchResults(false) // this line causes timeouts to report failures
.get()

@@ -43,7 +43,7 @@ public class StressSearchServiceReaperIT extends ESIntegTestCase {
indexRandom(true, builders);
final int iterations = scaledRandomIntBetween(500, 1000);
for (int i = 0; i < iterations; i++) {
assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(matchAllQuery()).setSize(num), num);
assertHitCountAndNoFailures(prepareSearch("test").setQuery(matchAllQuery()).setSize(num), num);
}
}
}
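
The hunks before and after this point all make the same mechanical change: calls to client().prepareSearch(...) are replaced by a bare prepareSearch(...) inherited from the test base class. As a rough, hypothetical sketch only (the real helper lives in ESIntegTestCase and its exact signature, visibility, and overloads may differ), such a convenience method could simply delegate to the shared test client:

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.internal.Client;

// Hypothetical sketch of the convenience helper these hunks migrate to; the actual
// method in ESIntegTestCase may differ.
public abstract class PrepareSearchHelperSketch {

    // Shorthand so tests can write prepareSearch("idx") instead of client().prepareSearch("idx").
    protected static SearchRequestBuilder prepareSearch(String... indices) {
        return client().prepareSearch(indices);
    }

    // Assumed to be provided by the real test framework: returns the shared test cluster client.
    protected static Client client() {
        throw new UnsupportedOperationException("supplied by the test framework at runtime");
    }
}

Under that assumption, each change pair in these hunks is behavior-preserving; only the call site gets shorter.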

@@ -39,8 +39,7 @@ public class AggregationsIntegrationIT extends ESIntegTestCase {
public void testScroll() {
final int size = randomIntBetween(1, 4);
SearchResponse response = client().prepareSearch("index")
.setSize(size)
SearchResponse response = prepareSearch("index").setSize(size)
.setScroll(TimeValue.timeValueMinutes(1))
.addAggregation(terms("f").field("f"))
.get();

@@ -61,8 +61,7 @@ public class CombiIT extends ESIntegTestCase {
ensureSearchable();
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
SearchResponse response = client().prepareSearch("idx")
.addAggregation(missing("missing_values").field("value"))
SearchResponse response = prepareSearch("idx").addAggregation(missing("missing_values").field("value"))
.addAggregation(terms("values").field("value").collectMode(aggCollectionMode))
.get();

@@ -109,11 +108,9 @@ public class CombiIT extends ESIntegTestCase {
ensureSearchable("idx");
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
SearchResponse searchResponse = client().prepareSearch("idx")
.addAggregation(
histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode))
)
.get();
SearchResponse searchResponse = prepareSearch("idx").addAggregation(
histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode))
).get();
assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(0L));
Histogram values = searchResponse.getAggregations().get("values");

@@ -152,7 +152,7 @@ public class EquivalenceIT extends ESIntegTestCase {
}
}
SearchRequestBuilder reqBuilder = client().prepareSearch("idx").addAggregation(query);
SearchRequestBuilder reqBuilder = prepareSearch("idx").addAggregation(query);
for (int i = 0; i < ranges.length; ++i) {
RangeQueryBuilder filter = QueryBuilders.rangeQuery("values");
if (ranges[i][0] != Double.NEGATIVE_INFINITY) {

@@ -254,13 +254,12 @@ public class EquivalenceIT extends ESIntegTestCase {
assertNoFailures(indicesAdmin().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
SearchResponse resp = client().prepareSearch("idx")
.addAggregation(
terms("long").field("long_values")
.size(maxNumTerms)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(min("min").field("num"))
)
SearchResponse resp = prepareSearch("idx").addAggregation(
terms("long").field("long_values")
.size(maxNumTerms)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(min("min").field("num"))
)
.addAggregation(
terms("double").field("double_values")
.size(maxNumTerms)

@@ -356,15 +355,12 @@ public class EquivalenceIT extends ESIntegTestCase {
Map<String, Object> params = new HashMap<>();
params.put("interval", interval);
SearchResponse resp = client().prepareSearch("idx")
.addAggregation(
terms("terms").field("values")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params))
.size(maxNumTerms)
)
.addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1))
.get();
SearchResponse resp = prepareSearch("idx").addAggregation(
terms("terms").field("values")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params))
.size(maxNumTerms)
).addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1)).get();
assertNoFailures(resp);

@@ -402,13 +398,11 @@ public class EquivalenceIT extends ESIntegTestCase {
}
indexRandom(true, indexingRequests);
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
terms("terms").field("double_value")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(percentiles("pcts").field("double_value"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
terms("terms").field("double_value")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(percentiles("pcts").field("double_value"))
).get();
assertAllSuccessful(response);
assertEquals(numDocs, response.getHits().getTotalHits().value);
}

@@ -418,13 +412,11 @@ public class EquivalenceIT extends ESIntegTestCase {
createIndex("idx");
final int value = randomIntBetween(0, 10);
indexRandom(true, client().prepareIndex("idx").setSource("f", value));
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filter("filter", QueryBuilders.matchAllQuery()).subAggregation(
range("range").field("f").addUnboundedTo(6).addUnboundedFrom(6).subAggregation(sum("sum").field("f"))
)
SearchResponse response = prepareSearch("idx").addAggregation(
filter("filter", QueryBuilders.matchAllQuery()).subAggregation(
range("range").field("f").addUnboundedTo(6).addUnboundedFrom(6).subAggregation(sum("sum").field("f"))
)
.get();
).get();
assertNoFailures(response);

@@ -481,29 +473,25 @@ public class EquivalenceIT extends ESIntegTestCase {
}
indexRandom(true, reqs);
final SearchResponse r1 = client().prepareSearch("idx")
.addAggregation(
terms("f1").field("f1")
.collectMode(SubAggCollectionMode.DEPTH_FIRST)
.subAggregation(
terms("f2").field("f2")
.collectMode(SubAggCollectionMode.DEPTH_FIRST)
.subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST))
)
)
.get();
final SearchResponse r1 = prepareSearch("idx").addAggregation(
terms("f1").field("f1")
.collectMode(SubAggCollectionMode.DEPTH_FIRST)
.subAggregation(
terms("f2").field("f2")
.collectMode(SubAggCollectionMode.DEPTH_FIRST)
.subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST))
)
).get();
assertNoFailures(r1);
final SearchResponse r2 = client().prepareSearch("idx")
.addAggregation(
terms("f1").field("f1")
.collectMode(SubAggCollectionMode.BREADTH_FIRST)
.subAggregation(
terms("f2").field("f2")
.collectMode(SubAggCollectionMode.BREADTH_FIRST)
.subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST))
)
)
.get();
final SearchResponse r2 = prepareSearch("idx").addAggregation(
terms("f1").field("f1")
.collectMode(SubAggCollectionMode.BREADTH_FIRST)
.subAggregation(
terms("f2").field("f2")
.collectMode(SubAggCollectionMode.BREADTH_FIRST)
.subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST))
)
).get();
assertNoFailures(r2);
final Terms t1 = r1.getAggregations().get("f1");

@@ -39,12 +39,9 @@ public class MetadataIT extends ESIntegTestCase {
final var nestedMetadata = Map.of("nested", "value");
var metadata = Map.of("key", "value", "numeric", 1.2, "bool", true, "complex", nestedMetadata);
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value"))
)
.addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value"))
).addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)).get();
assertNoFailures(response);

@@ -54,9 +54,7 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testUnmappedTerms() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("my_terms").field("non_existing_field").missing("bar"))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("non_existing_field").missing("bar")).get();
assertNoFailures(response);
Terms terms = response.getAggregations().get("my_terms");
assertEquals(1, terms.getBuckets().size());

@@ -65,16 +63,16 @@ public class MissingValueIT extends ESIntegTestCase {
public void testStringTerms() {
for (ExecutionMode mode : ExecutionMode.values()) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("my_terms").field("str").executionHint(mode.toString()).missing("bar"))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
terms("my_terms").field("str").executionHint(mode.toString()).missing("bar")
).get();
assertNoFailures(response);
Terms terms = response.getAggregations().get("my_terms");
assertEquals(2, terms.getBuckets().size());
assertEquals(1, terms.getBucketByKey("foo").getDocCount());
assertEquals(1, terms.getBucketByKey("bar").getDocCount());
response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get();
response = prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get();
assertNoFailures(response);
terms = response.getAggregations().get("my_terms");
assertEquals(1, terms.getBuckets().size());

@@ -83,14 +81,14 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testLongTerms() {
SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get();
SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get();
assertNoFailures(response);
Terms terms = response.getAggregations().get("my_terms");
assertEquals(2, terms.getBuckets().size());
assertEquals(1, terms.getBucketByKey("3").getDocCount());
assertEquals(1, terms.getBucketByKey("4").getDocCount());
response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get();
response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get();
assertNoFailures(response);
terms = response.getAggregations().get("my_terms");
assertEquals(1, terms.getBuckets().size());

@@ -98,14 +96,14 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testDoubleTerms() {
SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get();
SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get();
assertNoFailures(response);
Terms terms = response.getAggregations().get("my_terms");
assertEquals(2, terms.getBuckets().size());
assertEquals(1, terms.getBucketByKey("4.5").getDocCount());
assertEquals(1, terms.getBucketByKey("5.5").getDocCount());
response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)).get();
response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)).get();
assertNoFailures(response);
terms = response.getAggregations().get("my_terms");
assertEquals(1, terms.getBuckets().size());

@@ -113,9 +111,9 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testUnmappedHistogram() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("my_histogram").field("non-existing_field").interval(5).missing(12))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
histogram("my_histogram").field("non-existing_field").interval(5).missing(12)
).get();
assertNoFailures(response);
Histogram histogram = response.getAggregations().get("my_histogram");
assertEquals(1, histogram.getBuckets().size());

@@ -124,9 +122,7 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testHistogram() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("my_histogram").field("long").interval(5).missing(7))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)).get();
assertNoFailures(response);
Histogram histogram = response.getAggregations().get("my_histogram");
assertEquals(2, histogram.getBuckets().size());

@@ -135,7 +131,7 @@ public class MissingValueIT extends ESIntegTestCase {
assertEquals(5d, histogram.getBuckets().get(1).getKey());
assertEquals(1, histogram.getBuckets().get(1).getDocCount());
response = client().prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get();
response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get();
assertNoFailures(response);
histogram = response.getAggregations().get("my_histogram");
assertEquals(1, histogram.getBuckets().size());

@@ -144,9 +140,9 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testDateHistogram() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07"))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07")
).get();
assertNoFailures(response);
Histogram histogram = response.getAggregations().get("my_histogram");
assertEquals(2, histogram.getBuckets().size());

@@ -155,9 +151,9 @@ public class MissingValueIT extends ESIntegTestCase {
assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(1).getKeyAsString());
assertEquals(1, histogram.getBuckets().get(1).getDocCount());
response = client().prepareSearch("idx")
.addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07"))
.get();
response = prepareSearch("idx").addAggregation(
dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07")
).get();
assertNoFailures(response);
histogram = response.getAggregations().get("my_histogram");
assertEquals(1, histogram.getBuckets().size());

@@ -166,23 +162,21 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testCardinality() {
SearchResponse response = client().prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get();
SearchResponse response = prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get();
assertNoFailures(response);
Cardinality cardinality = response.getAggregations().get("card");
assertEquals(2, cardinality.getValue());
}
public void testPercentiles() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(percentiles("percentiles").field("long").missing(1000))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(percentiles("percentiles").field("long").missing(1000)).get();
assertNoFailures(response);
Percentiles percentiles = response.getAggregations().get("percentiles");
assertEquals(1000, percentiles.percentile(100), 0);
}
public void testStats() {
SearchResponse response = client().prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get();
SearchResponse response = prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get();
assertNoFailures(response);
Stats stats = response.getAggregations().get("stats");
assertEquals(2, stats.getCount());

@@ -190,9 +184,7 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testUnmappedGeoBounds() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1"))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")).get();
assertNoFailures(response);
GeoBounds bounds = response.getAggregations().get("bounds");
assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5));

@@ -202,7 +194,7 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testGeoBounds() {
SearchResponse response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get();
SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get();
assertNoFailures(response);
GeoBounds bounds = response.getAggregations().get("bounds");
assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5));

@@ -212,9 +204,7 @@ public class MissingValueIT extends ESIntegTestCase {
}
public void testGeoCentroid() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geoCentroid("centroid").field("location").missing("2,1"))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(geoCentroid("centroid").field("location").missing("2,1")).get();
assertNoFailures(response);
GeoCentroid centroid = response.getAggregations().get("centroid");
GeoPoint point = new GeoPoint(1.5, 1.5);

@@ -73,11 +73,9 @@ public class BooleanTermsIT extends ESIntegTestCase {
}
public void testSingleValueField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
).get();
assertNoFailures(response);

@@ -107,11 +105,9 @@ public class BooleanTermsIT extends ESIntegTestCase {
}
public void testMultiValueField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
).get();
assertNoFailures(response);

@@ -141,13 +137,11 @@ public class BooleanTermsIT extends ESIntegTestCase {
}
public void testUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx_unmapped")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.size(between(1, 5))
.collectMode(randomFrom(SubAggCollectionMode.values()))
)
.get();
SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.size(between(1, 5))
.collectMode(randomFrom(SubAggCollectionMode.values()))
).get();
assertNoFailures(response);
@ -225,9 +225,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedField() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH))
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -260,15 +260,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldWithTimeZone() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.DAY)
|
||||
.minDocCount(1)
|
||||
.timeZone(ZoneId.of("+01:00"))
|
||||
)
|
||||
.execute()
|
||||
.actionGet();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(ZoneId.of("+01:00"))
|
||||
).execute().actionGet();
|
||||
ZoneId tz = ZoneId.of("+01:00");
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -328,11 +322,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
format = format + "||date_optional_time";
|
||||
}
|
||||
ZoneId tz = ZoneId.of("+01:00");
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(tz).format(format)
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(tz).format(format)
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
|
||||
Histogram histo = response.getAggregations().get("histo");
|
||||
|
@ -362,9 +354,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(true)))
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(true))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -382,11 +374,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedByKeyDesc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(false))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(false))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -403,11 +393,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedByCountAsc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(true))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(true))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -424,11 +412,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedByCountDesc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -445,11 +431,9 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldWithSubAggregation() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).subAggregation(sum("sum").field("value"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).subAggregation(sum("sum").field("value"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -504,14 +488,12 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("sum", true))
|
||||
.subAggregation(max("sum").field("value"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("sum", true))
|
||||
.subAggregation(max("sum").field("value"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -528,14 +510,12 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("sum", false))
|
||||
.subAggregation(max("sum").field("value"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("sum", false))
|
||||
.subAggregation(max("sum").field("value"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -552,14 +532,12 @@ public class DateHistogramIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("stats", "sum", false))
|
||||
.subAggregation(stats("stats").field("value"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateHistogram("histo").field("date")
|
||||
.calendarInterval(DateHistogramInterval.MONTH)
|
||||
.order(BucketOrder.aggregation("stats", "sum", false))
|
||||
.subAggregation(stats("stats").field("value"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|

@@ -576,14 +554,12 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testSingleValuedFieldOrderedByTieBreaker() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.MONTH)
.order(BucketOrder.aggregation("max_constant", randomBoolean()))
.subAggregation(max("max_constant").field("constant"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.MONTH)
.order(BucketOrder.aggregation("max_constant", randomBoolean()))
.subAggregation(max("max_constant").field("constant"))
).get();

assertNoFailures(response);

@@ -602,18 +578,16 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception {
boolean asc = true;
try {
client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.MONTH)
.order(BucketOrder.aggregation("inner_histo>avg", asc))
.subAggregation(
dateHistogram("inner_histo").calendarInterval(DateHistogramInterval.MONTH)
.field("dates")
.subAggregation(avg("avg").field("value"))
)
)
.get();
prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.MONTH)
.order(BucketOrder.aggregation("inner_histo>avg", asc))
.subAggregation(
dateHistogram("inner_histo").calendarInterval(DateHistogramInterval.MONTH)
.field("dates")
.subAggregation(avg("avg").field("value"))
)
).get();
fail("Expected an exception");
} catch (SearchPhaseExecutionException e) {
ElasticsearchException[] rootCauses = e.guessRootCauses();

@@ -633,13 +607,11 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testSingleValuedFieldWithValueScript() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "date");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))
.calendarInterval(DateHistogramInterval.MONTH)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))
.calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -682,9 +654,9 @@ public class DateHistogramIT extends ESIntegTestCase {
*/

public void testMultiValuedField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -724,11 +696,9 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testMultiValuedFieldOrderedByCountDesc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false))
).get();

assertNoFailures(response);

@@ -773,13 +743,11 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testMultiValuedFieldWithValueScript() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "dates");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("dates")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))
.calendarInterval(DateHistogramInterval.MONTH)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("dates")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))
.calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -829,12 +797,10 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testScriptSingleValue() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "date");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params))
.calendarInterval(DateHistogramInterval.MONTH)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params))
.calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -869,12 +835,10 @@ public class DateHistogramIT extends ESIntegTestCase {
public void testScriptMultiValued() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "dates");
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params))
.calendarInterval(DateHistogramInterval.MONTH)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params))
.calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -923,9 +887,9 @@ public class DateHistogramIT extends ESIntegTestCase {
*/

public void testUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx_unmapped")
.addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH))
.get();
SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)
).get();

assertNoFailures(response);

@@ -971,8 +935,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field("value")
.interval(1L)

@@ -1010,8 +973,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
indexRandom(true, reqs);

SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date")
.timeZone(ZoneId.of("-02:00"))

@@ -1106,16 +1068,14 @@ public class DateHistogramIT extends ESIntegTestCase {

SearchResponse response = null;
try {
response = client().prepareSearch("idx2")
.addAggregation(
dateHistogram("histo").field("date")
.fixedInterval(DateHistogramInterval.days(interval))
.minDocCount(0)
// when explicitly specifying a format, the extended bounds should be defined by the same format
.extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern)))
.format(pattern)
)
.get();
response = prepareSearch("idx2").addAggregation(
dateHistogram("histo").field("date")
.fixedInterval(DateHistogramInterval.days(interval))
.minDocCount(0)
// when explicitly specifying a format, the extended bounds should be defined by the same format
.extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern)))
.format(pattern)
).get();

if (invalidBoundsError) {
fail("Expected an exception to be thrown when bounds.min is greater than bounds.max");
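Since the comment above notes that explicitly formatted extended bounds must use the same pattern passed to .format(...), here is a hedged illustration of that constraint (the field name, pattern, and dates are illustrative, not taken from the diff; assumes the usual AggregationBuilders, DateHistogramInterval, and LongBounds imports):

// Sketch only: bounds rendered with the same pattern that is passed to .format(...)
String pattern = "yyyy-MM-dd";
DateHistogramAggregationBuilder histo = AggregationBuilders.dateHistogram("histo")
    .field("date")
    .fixedInterval(DateHistogramInterval.days(1))
    .minDocCount(0)
    .format(pattern)
    .extendedBounds(new LongBounds("2016-01-01", "2016-01-08")); // same yyyy-MM-dd pattern as above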

@@ -1175,10 +1135,9 @@ public class DateHistogramIT extends ESIntegTestCase {

SearchResponse response = null;
// retrieve those docs with the same time zone and extended bounds
response = client().prepareSearch(index)
.setQuery(
QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId())
)
response = prepareSearch(index).setQuery(
QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId())
)
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.hours(1))

@@ -1236,15 +1195,13 @@ public class DateHistogramIT extends ESIntegTestCase {

SearchResponse response = null;
// retrieve those docs with the same time zone and extended bounds
response = client().prepareSearch(index)
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.days(1))
.offset("+6h")
.minDocCount(0)
.extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z"))
)
.get();
response = prepareSearch(index).addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.days(1))
.offset("+6h")
.minDocCount(0)
.extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z"))
).get();
assertNoFailures(response);

Histogram histo = response.getAggregations().get("histo");

@@ -1293,8 +1250,7 @@ public class DateHistogramIT extends ESIntegTestCase {
}
indexRandom(true, reqs);

SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(dateHistogram("date_histo").field("date").calendarInterval(DateHistogramInterval.DAY))
.get();

@@ -1313,14 +1269,9 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testIssue6965() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date")
.timeZone(ZoneId.of("+01:00"))
.calendarInterval(DateHistogramInterval.MONTH)
.minDocCount(0)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00")).calendarInterval(DateHistogramInterval.MONTH).minDocCount(0)
).get();

assertNoFailures(response);

@@ -1362,14 +1313,12 @@ public class DateHistogramIT extends ESIntegTestCase {
client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z")
);
ensureSearchable("test9491");
SearchResponse response = client().prepareSearch("test9491")
.addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.YEAR)
.timeZone(ZoneId.of("Asia/Jerusalem"))
.format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
)
.get();
SearchResponse response = prepareSearch("test9491").addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.YEAR)
.timeZone(ZoneId.of("Asia/Jerusalem"))
.format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
).get();
assertNoFailures(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));

@@ -1386,15 +1335,13 @@ public class DateHistogramIT extends ESIntegTestCase {
client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z")
);
ensureSearchable("test8209");
SearchResponse response = client().prepareSearch("test8209")
.addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.MONTH)
.format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
.timeZone(ZoneId.of("CET"))
.minDocCount(0)
)
.get();
SearchResponse response = prepareSearch("test8209").addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.MONTH)
.format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
.timeZone(ZoneId.of("CET"))
.minDocCount(0)
).get();
assertNoFailures(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(4));

@@ -1423,15 +1370,13 @@ public class DateHistogramIT extends ESIntegTestCase {
indexRandom(true, client().prepareIndex(indexDateUnmapped).setSource("foo", "bar"));
ensureSearchable(indexDateUnmapped);

SearchResponse response = client().prepareSearch(indexDateUnmapped)
.addAggregation(
dateHistogram("histo").field("dateField")
.calendarInterval(DateHistogramInterval.MONTH)
.format("yyyy-MM")
.minDocCount(0)
.extendedBounds(new LongBounds("2018-01", "2018-01"))
)
.get();
SearchResponse response = prepareSearch(indexDateUnmapped).addAggregation(
dateHistogram("histo").field("dateField")
.calendarInterval(DateHistogramInterval.MONTH)
.format("yyyy-MM")
.minDocCount(0)
.extendedBounds(new LongBounds("2018-01", "2018-01"))
).get();
assertNoFailures(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));

@@ -1450,25 +1395,21 @@ public class DateHistogramIT extends ESIntegTestCase {
assertAcked(indicesAdmin().prepareCreate(index).setMapping("d", "type=date,format=epoch_millis").get());
indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000"));
ensureSearchable(index);
SearchResponse response = client().prepareSearch(index)
.addAggregation(
dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))
)
.get();
SearchResponse response = prepareSearch(index).addAggregation(
dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))
).get();
assertNoFailures(response);
Histogram histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));
assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));

response = client().prepareSearch(index)
.addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.MONTH)
.timeZone(ZoneId.of("Europe/Berlin"))
.format("yyyy-MM-dd")
)
.get();
response = prepareSearch(index).addAggregation(
dateHistogram("histo").field("d")
.calendarInterval(DateHistogramInterval.MONTH)
.timeZone(ZoneId.of("Europe/Berlin"))
.format("yyyy-MM-dd")
).get();
assertNoFailures(response);
histo = response.getAggregations().get("histo");
assertThat(histo.getBuckets().size(), equalTo(1));

@@ -1486,8 +1427,7 @@ public class DateHistogramIT extends ESIntegTestCase {
* "2015-10-25T04:00:00.000+01:00".
*/
public void testDSTEndTransition() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setQuery(new MatchNoneQueryBuilder())
SearchResponse response = prepareSearch("idx").setQuery(new MatchNoneQueryBuilder())
.addAggregation(
dateHistogram("histo").field("date")
.timeZone(ZoneId.of("Europe/Oslo"))

@@ -1516,8 +1456,7 @@ public class DateHistogramIT extends ESIntegTestCase {
equalTo(3600000L)
);

response = client().prepareSearch("idx")
.setQuery(new MatchNoneQueryBuilder())
response = prepareSearch("idx").setQuery(new MatchNoneQueryBuilder())
.addAggregation(
dateHistogram("histo").field("date")
.timeZone(ZoneId.of("Europe/Oslo"))
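As background for the DST assertions above, a small standalone java.time sketch of the Europe/Oslo end-of-DST transition those buckets straddle (illustrative only, not part of the diff):

// Illustrative only: the Europe/Oslo clock change referenced by testDSTEndTransition.
import java.time.ZoneId;
import java.time.ZonedDateTime;

public class OsloDstSketch {
    public static void main(String[] args) {
        ZoneId oslo = ZoneId.of("Europe/Oslo");
        // On 2015-10-25 clocks in Oslo fall back from 03:00 CEST (+02:00) to 02:00 CET (+01:00),
        // so the 02:xx local hour occurs twice with two different offsets.
        ZonedDateTime first = ZonedDateTime.of(2015, 10, 25, 2, 30, 0, 0, oslo); // earlier offset, +02:00
        ZonedDateTime second = first.plusHours(1);                               // same wall-clock time, +01:00
        System.out.println(first);   // 2015-10-25T02:30+02:00[Europe/Oslo]
        System.out.println(second);  // 2015-10-25T02:30+01:00[Europe/Oslo]
    }
}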

@@ -1578,8 +1517,7 @@ public class DateHistogramIT extends ESIntegTestCase {
// Test that a request using a nondeterministic script does not get cached
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "d");
SearchResponse r = client().prepareSearch("cache_test_idx")
.setSize(0)
SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
.addAggregation(
dateHistogram("histo").field("d")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params))

@@ -1598,8 +1536,7 @@ public class DateHistogramIT extends ESIntegTestCase {
);

// Test that a request using a deterministic script gets cached
r = client().prepareSearch("cache_test_idx")
.setSize(0)
r = prepareSearch("cache_test_idx").setSize(0)
.addAggregation(
dateHistogram("histo").field("d")
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))

@@ -1618,8 +1555,7 @@ public class DateHistogramIT extends ESIntegTestCase {
);

// Ensure that non-scripted requests are cached as normal
r = client().prepareSearch("cache_test_idx")
.setSize(0)
r = prepareSearch("cache_test_idx").setSize(0)
.addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH))
.get();
assertNoFailures(r);

@@ -1676,15 +1612,13 @@ public class DateHistogramIT extends ESIntegTestCase {

private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
SearchResponse response = client().prepareSearch("sort_idx")
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.DAY)
.order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))
)
.get();
SearchResponse response = prepareSearch("sort_idx").addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.DAY)
.order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))
).get();

assertNoFailures(response);

@@ -1722,14 +1656,11 @@ public class DateHistogramIT extends ESIntegTestCase {
indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02"));

// Search interval 24 hours
SearchResponse r = client().prepareSearch("nanos")
.addAggregation(
dateHistogram("histo").field("date")
.fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24))
.timeZone(ZoneId.of("Europe/Berlin"))
)
.addDocValueField("date")
.get();
SearchResponse r = prepareSearch("nanos").addAggregation(
dateHistogram("histo").field("date")
.fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24))
.timeZone(ZoneId.of("Europe/Berlin"))
).addDocValueField("date").get();
assertNoFailures(r);

Histogram histogram = r.getAggregations().get("histo");

@@ -1740,12 +1671,9 @@ public class DateHistogramIT extends ESIntegTestCase {
assertEquals(946767600000L, ((ZonedDateTime) buckets.get(1).getKey()).toEpochSecond() * 1000);
assertEquals(1, buckets.get(1).getDocCount());

r = client().prepareSearch("nanos")
.addAggregation(
dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC"))
)
.addDocValueField("date")
.get();
r = prepareSearch("nanos").addAggregation(
dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC"))
).addDocValueField("date").get();
assertNoFailures(r);

histogram = r.getAggregations().get("histo");

@@ -1758,11 +1686,9 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testDateKeyFormatting() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("America/Edmonton"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("America/Edmonton"))
).get();

assertNoFailures(response);

@@ -1774,13 +1700,11 @@ public class DateHistogramIT extends ESIntegTestCase {
}

public void testHardBoundsOnDates() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.DAY)
.hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
dateHistogram("histo").field("date")
.calendarInterval(DateHistogramInterval.DAY)
.hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000"))
).get();

assertNoFailures(response);

@@ -72,8 +72,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
public void testSingleValueWithPositiveOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0);

SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
)

@@ -92,8 +91,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
public void testSingleValueWithNegativeOffset() throws Exception {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0);

SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
)

@@ -116,8 +114,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase {
prepareIndex(date("2014-03-11T00:00:00+00:00"), 12, 1, 0);
prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13);

SearchResponse response = client().prepareSearch("idx2")
.setQuery(matchAllQuery())
SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
.addAggregation(
dateHistogram("date_histo").field("date")
.offset("6h")
@ -127,14 +127,12 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
} else {
|
||||
rangeBuilder.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params));
|
||||
}
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
rangeBuilder.addUnboundedTo("a long time ago", "now-50y")
|
||||
.addRange("recently", "now-50y", "now-1y")
|
||||
.addUnboundedFrom("last year", "now-1y")
|
||||
.timeZone(ZoneId.of("Etc/GMT+5"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
rangeBuilder.addUnboundedTo("a long time ago", "now-50y")
|
||||
.addRange("recently", "now-50y", "now-1y")
|
||||
.addUnboundedFrom("last year", "now-1y")
|
||||
.timeZone(ZoneId.of("Etc/GMT+5"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -162,14 +160,9 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValueField() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo(date(2, 15))
|
||||
.addRange(date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom(date(3, 15))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date").addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -208,14 +201,12 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValueFieldWithStringDates() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-03-15")
|
||||
.addUnboundedFrom("2012-03-15")
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-03-15")
|
||||
.addUnboundedFrom("2012-03-15")
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -254,15 +245,13 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.format("yyyy-MM-dd")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-03-15")
|
||||
.addUnboundedFrom("2012-03-15")
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.format("yyyy-MM-dd")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-03-15")
|
||||
.addUnboundedFrom("2012-03-15")
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -308,15 +297,13 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3, 15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT));
|
||||
long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
|
||||
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-02-15||+1M")
|
||||
.addUnboundedFrom("2012-02-15||+1M")
|
||||
.timeZone(timezone)
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("2012-02-15")
|
||||
.addRange("2012-02-15", "2012-02-15||+1M")
|
||||
.addUnboundedFrom("2012-02-15||+1M")
|
||||
.timeZone(timezone)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -355,14 +342,12 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValueFieldWithCustomKey() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("r1", date(2, 15))
|
||||
.addRange("r2", date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom("r3", date(3, 15))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("r1", date(2, 15))
|
||||
.addRange("r2", date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom("r3", date(3, 15))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -410,15 +395,13 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
*/
|
||||
|
||||
public void testSingleValuedFieldWithSubAggregation() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("r1", date(2, 15))
|
||||
.addRange("r2", date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom("r3", date(3, 15))
|
||||
.subAggregation(sum("sum").field("value"))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("date")
|
||||
.addUnboundedTo("r1", date(2, 15))
|
||||
.addRange("r2", date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom("r3", date(3, 15))
|
||||
.subAggregation(sum("sum").field("value"))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -486,14 +469,9 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
*/
|
||||
|
||||
public void testMultiValuedField() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
dateRange("range").field("dates")
|
||||
.addUnboundedTo(date(2, 15))
|
||||
.addRange(date(2, 15), date(3, 15))
|
||||
.addUnboundedFrom(date(3, 15))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
dateRange("range").field("dates").addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -578,8 +556,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
public void testEmptyAggregation() throws Exception {
|
||||
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
|
||||
.setQuery(matchAllQuery())
|
||||
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
|
||||
.addAggregation(
|
||||
histogram("histo").field("value")
|
||||
.interval(1L)
|
||||
|
@ -608,7 +585,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
public void testNoRangesInQuery() {
|
||||
try {
|
||||
client().prepareSearch("idx").addAggregation(dateRange("my_date_range_agg").field("value")).get();
|
||||
prepareSearch("idx").addAggregation(dateRange("my_date_range_agg").field("value")).get();
|
||||
fail();
|
||||
} catch (SearchPhaseExecutionException spee) {
|
||||
Throwable rootCause = spee.getCause().getCause();
|
||||
|
@ -650,8 +627,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
// Test that a request using a nondeterministic script does not get cached
|
||||
Map<String, Object> params = new HashMap<>();
|
||||
params.put("fieldname", "date");
|
||||
SearchResponse r = client().prepareSearch("cache_test_idx")
|
||||
.setSize(0)
|
||||
SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("foo").field("date")
|
||||
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params))
|
||||
|
@ -673,8 +649,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
);
|
||||
|
||||
// Test that a request using a deterministic script gets cached
|
||||
r = client().prepareSearch("cache_test_idx")
|
||||
.setSize(0)
|
||||
r = prepareSearch("cache_test_idx").setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("foo").field("date")
|
||||
.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params))
|
||||
|
@ -696,8 +671,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
);
|
||||
|
||||
// Ensure that non-scripted requests are cached as normal
|
||||
r = client().prepareSearch("cache_test_idx")
|
||||
.setSize(0)
|
||||
r = prepareSearch("cache_test_idx").setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("foo").field("date")
|
||||
.addRange(
|
||||
|
@ -734,8 +708,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// using no format should work when to/from is compatible with format in
|
||||
// mapping
|
||||
SearchResponse searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
SearchResponse searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40"))
|
||||
.get();
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
|
||||
|
@ -745,8 +718,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// using different format should work when to/from is compatible with
|
||||
// format in aggregation
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss")
|
||||
)
|
||||
|
@ -758,8 +730,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// providing numeric input with format should work, but bucket keys are
|
||||
// different now
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis")
|
||||
)
|
||||
|
@ -772,8 +743,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
// providing numeric input without format should throw an exception
|
||||
ElasticsearchException e = expectThrows(
|
||||
ElasticsearchException.class,
|
||||
() -> client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
() -> prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000))
|
||||
.get()
|
||||
);
|
||||
|
@ -796,8 +766,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// using no format should work when to/from is compatible with format in
|
||||
// mapping
|
||||
SearchResponse searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
SearchResponse searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000))
|
||||
.get();
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
|
||||
|
@ -806,8 +775,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
|
||||
|
||||
// using no format should also work when and to/from are string values
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000"))
|
||||
.get();
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
|
||||
|
@ -816,8 +784,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
|
||||
|
||||
// also e-notation should work, fractional parts should be truncated
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3))
|
||||
.get();
|
||||
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
|
||||
|
@ -827,8 +794,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// using different format should work when to/from is compatible with
|
||||
// format in aggregation
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss")
|
||||
)
|
||||
|
@ -840,8 +806,7 @@ public class DateRangeIT extends ESIntegTestCase {
|
|||
|
||||
// providing different numeric input with format should work, but bucket
|
||||
// keys are different now
|
||||
searchResponse = client().prepareSearch(indexName)
|
||||
.setSize(0)
|
||||
searchResponse = prepareSearch(indexName).setSize(0)
|
||||
.addAggregation(
|
||||
dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis")
|
||||
)
|
||||
|
|
|
@ -93,8 +93,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
// Tests that we can refer to nested elements under a sample in a path
|
||||
// statement
|
||||
boolean asc = randomBoolean();
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.addAggregation(
|
||||
terms("genres").field("genre")
|
||||
.order(BucketOrder.aggregation("sample>max_price.value", asc))
|
||||
|
@ -126,8 +125,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100);
|
||||
sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint());
|
||||
sampleAgg.subAggregation(terms("authors").field("author"));
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.setQuery(new TermQueryBuilder("genre", "fantasy"))
|
||||
.setFrom(0)
|
||||
.setSize(60)
|
||||
|
@ -153,7 +151,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
sampleAgg.subAggregation(terms("authors").field("author"));
|
||||
|
||||
rootTerms.subAggregation(sampleAgg);
|
||||
SearchResponse response = client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootTerms).get();
|
||||
SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootTerms).get();
|
||||
assertNoFailures(response);
|
||||
Terms genres = response.getAggregations().get("genres");
|
||||
List<? extends Bucket> genreBuckets = genres.getBuckets();
|
||||
|
@ -182,10 +180,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
sampleAgg.subAggregation(terms("genres").field("genre"));
|
||||
|
||||
rootSample.subAggregation(sampleAgg);
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.addAggregation(rootSample)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootSample).get();
|
||||
assertNoFailures(response);
|
||||
Sampler genreSample = response.getAggregations().get("genreSample");
|
||||
Sampler sample = genreSample.getAggregations().get("sample");
|
||||
|
@ -249,8 +244,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(Integer.MAX_VALUE);
|
||||
sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint());
|
||||
sampleAgg.subAggregation(terms("authors").field("author"));
|
||||
SearchResponse response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.setQuery(new TermQueryBuilder("genre", "fantasy"))
|
||||
.setFrom(0)
|
||||
.setSize(60)
|
||||
|
@ -261,8 +255,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
|
|||
sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100);
|
||||
sampleAgg.field("author").maxDocsPerValue(Integer.MAX_VALUE).executionHint(randomExecutionHint());
|
||||
sampleAgg.subAggregation(terms("authors").field("author"));
|
||||
response = client().prepareSearch("test")
|
||||
.setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH)
|
||||
.setQuery(new TermQueryBuilder("genre", "fantasy"))
|
||||
.setFrom(0)
|
||||
.setSize(60)
|
||||
|
|
|
@ -255,14 +255,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
public void testSizeIsZero() {
|
||||
IllegalArgumentException exception = expectThrows(
|
||||
IllegalArgumentException.class,
|
||||
() -> client().prepareSearch("high_card_idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.minDocCount(randomInt(1))
|
||||
.size(0)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
)
|
||||
.get()
|
||||
() -> prepareSearch("high_card_idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.minDocCount(randomInt(1))
|
||||
.size(0)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
).get()
|
||||
);
|
||||
assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [terms]"));
|
||||
}
|
||||
|
@ -277,11 +275,9 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
|
||||
private void runTestFieldWithPartitionedFiltering(String field) throws Exception {
|
||||
// Find total number of unique terms
|
||||
SearchResponse allResponse = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse allResponse = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
).get();
|
||||
assertNoFailures(allResponse);
|
||||
DoubleTerms terms = allResponse.getAggregations().get("terms");
|
||||
assertThat(terms, notNullValue());
|
||||
|
@ -292,13 +288,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
final int numPartitions = randomIntBetween(2, 4);
|
||||
Set<Number> foundTerms = new HashSet<>();
|
||||
for (int partition = 0; partition < numPartitions; partition++) {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(field)
|
||||
.includeExclude(new IncludeExclude(partition, numPartitions))
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(field)
|
||||
.includeExclude(new IncludeExclude(partition, numPartitions))
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
terms = response.getAggregations().get("terms");
|
||||
assertThat(terms, notNullValue());
|
||||
|
@ -312,13 +306,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
}
|
||||
|
||||
public void testSingleValuedFieldWithValueScript() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -337,13 +329,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
}
|
||||
|
||||
public void testMultiValuedFieldWithValueScript() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -366,13 +356,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
}
|
||||
|
||||
public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap()))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -406,20 +394,18 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
*/
|
||||
|
||||
public void testScriptSingleValue() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.userValueTypeHint(ValueType.DOUBLE)
|
||||
.script(
|
||||
new Script(
|
||||
ScriptType.INLINE,
|
||||
CustomScriptPlugin.NAME,
|
||||
"doc['" + MULTI_VALUED_FIELD_NAME + "'].value",
|
||||
Collections.emptyMap()
|
||||
)
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.userValueTypeHint(ValueType.DOUBLE)
|
||||
.script(
|
||||
new Script(
|
||||
ScriptType.INLINE,
|
||||
CustomScriptPlugin.NAME,
|
||||
"doc['" + MULTI_VALUED_FIELD_NAME + "'].value",
|
||||
Collections.emptyMap()
|
||||
)
|
||||
)
|
||||
.get();
|
||||
)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -438,20 +424,13 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
}
|
||||
|
||||
public void testScriptMultiValued() throws Exception {
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.userValueTypeHint(ValueType.DOUBLE)
|
||||
.script(
|
||||
new Script(
|
||||
ScriptType.INLINE,
|
||||
CustomScriptPlugin.NAME,
|
||||
"doc['" + MULTI_VALUED_FIELD_NAME + "']",
|
||||
Collections.emptyMap()
|
||||
)
|
||||
)
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.userValueTypeHint(ValueType.DOUBLE)
|
||||
.script(
|
||||
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap())
|
||||
)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -524,18 +503,16 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
|
||||
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTermsAgg() throws Exception {
|
||||
boolean asc = true;
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("avg_i", asc))
|
||||
.subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
|
||||
.subAggregation(
|
||||
new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
)
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("avg_i", asc))
|
||||
.subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
|
||||
.subAggregation(
|
||||
new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME)
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -569,14 +546,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
|
||||
public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception {
|
||||
boolean asc = randomBoolean();
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("num_tags").field("num_tag")
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("filter", asc))
|
||||
.subAggregation(filter("filter", QueryBuilders.matchAllQuery()))
|
||||
)
|
||||
.get();
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("num_tags").field("num_tag")
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("filter", asc))
|
||||
.subAggregation(filter("filter", QueryBuilders.matchAllQuery()))
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -606,18 +581,16 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
|
||||
public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception {
|
||||
boolean asc = randomBoolean();
|
||||
SearchResponse response = client().prepareSearch("idx")
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("tags").field("num_tag")
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("filter1>filter2>max", asc))
|
||||
.subAggregation(
|
||||
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
|
||||
filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))
|
||||
)
|
||||
SearchResponse response = prepareSearch("idx").addAggregation(
|
||||
new TermsAggregationBuilder("tags").field("num_tag")
|
||||
.collectMode(randomFrom(SubAggCollectionMode.values()))
|
||||
.order(BucketOrder.aggregation("filter1>filter2>max", asc))
|
||||
.subAggregation(
|
||||
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
|
||||
filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))
|
||||
)
|
||||
)
|
||||
.get();
|
||||
)
|
||||
).get();
|
||||
|
||||
assertNoFailures(response);
|
||||
|
||||
|
@ -663,13 +636,11 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
|
|||
public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Exception {
|
||||
for (String index : Arrays.asList("idx", "idx_unmapped")) {
|
||||
try {
|
||||
client().prepareSearch(index)
|
||||
.addAggregation(
|
||||
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("avg_i", true))
)
.get();
prepareSearch(index).addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("avg_i", true))
).get();

fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist");

@ -682,17 +653,14 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("num_tags", true))
.subAggregation(
new TermsAggregationBuilder("num_tags").field("num_tags")
.collectMode(randomFrom(SubAggCollectionMode.values()))
)
)
.get();
prepareSearch(index).addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("num_tags", true))
.subAggregation(
new TermsAggregationBuilder("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values()))
)
).get();

fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics type");

@ -705,14 +673,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME + "2")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.foo", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
)
.get();
prepareSearch(index).addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME + "2")
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.foo", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).get();

fail(
"Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation "

@ -728,14 +694,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric() throws Exception {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
)
.get();
prepareSearch(index).addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).get();

fail(
"Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation "

@ -750,14 +714,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception {
boolean asc = true;
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).get();

assertNoFailures(response);

@ -780,14 +742,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception {
boolean asc = false;
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).get();

assertNoFailures(response);

@ -810,14 +770,12 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception {
boolean asc = true;
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.variance", asc))
.subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.aggregation("stats.variance", asc))
.subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME))
).get();

assertNoFailures(response);

@ -854,8 +812,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
Collections.emptyMap()
);

SearchResponse response = client().prepareSearch("idx")
.setQuery(functionScoreQuery(scriptFunction(scoringScript)))
SearchResponse response = prepareSearch("idx").setQuery(functionScoreQuery(scriptFunction(scoringScript)))
.addAggregation(
new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
.userValueTypeHint(ValueType.DOUBLE)

@ -920,15 +877,13 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
}

private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... order) {
SearchResponse response = client().prepareSearch("sort_idx")
.addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))
)
.get();
SearchResponse response = prepareSearch("sort_idx").addAggregation(
new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
.order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))
).get();

assertNoFailures(response);

@ -983,8 +938,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
);

// Test that a request using a nondeterministic script does not get cached
SearchResponse r = client().prepareSearch("cache_test_idx")
.setSize(0)
SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
.addAggregation(
new TermsAggregationBuilder("terms").field("d")
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))

@ -1002,8 +956,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
);

// Test that a request using a deterministic script gets cached
r = client().prepareSearch("cache_test_idx")
.setSize(0)
r = prepareSearch("cache_test_idx").setSize(0)
.addAggregation(
new TermsAggregationBuilder("terms").field("d")
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap()))

@ -1021,7 +974,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
);

// Ensure that non-scripted requests are cached as normal
r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get();
r = prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get();
assertNoFailures(r);

assertThat(

@ -79,7 +79,7 @@ public class FilterIT extends ESIntegTestCase {
}

public void testSimple() throws Exception {
SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))).get();
SearchResponse response = prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))).get();

assertNoFailures(response);

@ -93,7 +93,7 @@ public class FilterIT extends ESIntegTestCase {
// https://github.com/elastic/elasticsearch/issues/8438
public void testEmptyFilterDeclarations() throws Exception {
QueryBuilder emptyFilter = new BoolQueryBuilder();
SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).get();
SearchResponse response = prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).get();

assertNoFailures(response);

@ -103,9 +103,9 @@ public class FilterIT extends ESIntegTestCase {
}

public void testWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value")))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value"))
).get();

assertNoFailures(response);

@ -128,9 +128,9 @@ public class FilterIT extends ESIntegTestCase {
}

public void testAsSubAggregation() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery())))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery()))
).get();

assertNoFailures(response);

@ -147,7 +147,7 @@ public class FilterIT extends ESIntegTestCase {
public void testWithContextBasedSubAggregation() throws Exception {
try {
client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get();
prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get();

fail(
"expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source"

@ -160,8 +160,7 @@ public class FilterIT extends ESIntegTestCase {
}

public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())))
.get();

@ -98,14 +98,12 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testSimple() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
)
SearchResponse response = prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
)
.get();
).get();

assertNoFailures(response);

@ -128,11 +126,9 @@ public class FiltersIT extends ESIntegTestCase {
// https://github.com/elastic/elasticsearch/issues/8438
public void testEmptyFilterDeclarations() throws Exception {
QueryBuilder emptyFilter = new BoolQueryBuilder();
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1"))))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1"))))
).get();

assertNoFailures(response);

@ -147,14 +143,12 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).subAggregation(avg("avg_value").field("value"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).subAggregation(avg("avg_value").field("value"))
).get();

assertNoFailures(response);

@ -202,9 +196,9 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testAsSubAggregation() {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filters("filters", matchAllQuery())))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
histogram("histo").field("value").interval(2L).subAggregation(filters("filters", matchAllQuery()))
).get();

assertNoFailures(response);

@ -224,14 +218,12 @@ public class FiltersIT extends ESIntegTestCase {
public void testWithContextBasedSubAggregation() throws Exception {

try {
client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).subAggregation(avg("avg_value"))
)
.get();
prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).subAggregation(avg("avg_value"))
).get();

fail(
"expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source"

@ -244,8 +236,7 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field("value")
.interval(1L)

@ -269,8 +260,7 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testSimpleNonKeyed() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")))
SearchResponse response = prepareSearch("idx").addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")))
.get();

assertNoFailures(response);

@ -294,14 +284,12 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testOtherBucket() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true)
).get();

assertNoFailures(response);

@ -325,14 +313,12 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testOtherNamedBucket() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true).otherBucketKey("foobar")
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true).otherBucketKey("foobar")
).get();

assertNoFailures(response);

@ -356,9 +342,9 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testOtherNonKeyed() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true)
).get();

assertNoFailures(response);

@ -385,14 +371,12 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testOtherWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true).subAggregation(avg("avg_value").field("value"))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
filters(
"tags",
randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2")))
).otherBucket(true).subAggregation(avg("avg_value").field("value"))
).get();

assertNoFailures(response);

@ -456,8 +440,7 @@ public class FiltersIT extends ESIntegTestCase {
}

public void testEmptyAggregationWithOtherBucket() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field("value")
.interval(1L)

@ -142,7 +142,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
for (Consumer<GeoDistanceAggregationBuilder> range : ranges) {
range.accept(builder);
}
SearchResponse response = client().prepareSearch("idx").addAggregation(builder).get();
SearchResponse response = prepareSearch("idx").addAggregation(builder).get();

assertNoFailures(response);

@ -181,15 +181,13 @@ public class GeoDistanceIT extends ESIntegTestCase {
}

public void testSimpleWithCustomKeys() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo("ring1", 500)
.addRange("ring2", 500, 1000)
.addUnboundedFrom("ring3", 1000)
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo("ring1", 500)
.addRange("ring2", 500, 1000)
.addUnboundedFrom("ring3", 1000)
).get();

assertNoFailures(response);

@ -230,15 +228,13 @@ public class GeoDistanceIT extends ESIntegTestCase {
public void testUnmapped() throws Exception {
clusterAdmin().prepareHealth("idx_unmapped").setWaitForYellowStatus().get();

SearchResponse response = client().prepareSearch("idx_unmapped")
.addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
)
.get();
SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
).get();

assertNoFailures(response);

@ -324,16 +320,14 @@ public class GeoDistanceIT extends ESIntegTestCase {
}

public void testWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
.subAggregation(terms("cities").field("city").collectMode(randomFrom(SubAggCollectionMode.values())))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
.subAggregation(terms("cities").field("city").collectMode(randomFrom(SubAggCollectionMode.values())))
).get();

assertNoFailures(response);

@ -409,8 +403,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
}

public void testEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
.setQuery(matchAllQuery())
SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field("value")
.interval(1L)

@ -441,7 +434,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
public void testNoRangesInQuery() {
try {
client().prepareSearch("idx").addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")).get();
prepareSearch("idx").addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")).get();
fail();
} catch (SearchPhaseExecutionException spee) {
Throwable rootCause = spee.getCause().getCause();

@ -451,16 +444,14 @@ public class GeoDistanceIT extends ESIntegTestCase {
}

public void testMultiValues() throws Exception {
SearchResponse response = client().prepareSearch("idx-multi")
.addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.distanceType(org.elasticsearch.common.geo.GeoDistance.ARC)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
)
.get();
SearchResponse response = prepareSearch("idx-multi").addAggregation(
geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location")
.unit(DistanceUnit.KILOMETERS)
.distanceType(org.elasticsearch.common.geo.GeoDistance.ARC)
.addUnboundedTo(500)
.addRange(500, 1000)
.addUnboundedFrom(1000)
).get();

assertNoFailures(response);

@ -132,8 +132,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void testSimple() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
SearchResponse response = prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
.get();

assertNoFailures(response);

@ -159,9 +158,9 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void testMultivalued() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("multi_valued_idx")
.addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
.get();
SearchResponse response = prepareSearch("multi_valued_idx").addAggregation(
geohashGrid("geohashgrid").field("location").precision(precision)
).get();

assertNoFailures(response);

@ -181,12 +180,10 @@ public class GeoHashGridIT extends ESIntegTestCase {
GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("location");
bbox.setCorners(smallestGeoHash).queryName("bbox");
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
AggregationBuilders.filter("filtered", bbox)
.subAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
)
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
AggregationBuilders.filter("filtered", bbox)
.subAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
).get();

assertNoFailures(response);

@ -207,9 +204,9 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void testUnmapped() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx_unmapped")
.addAggregation(geohashGrid("geohashgrid").field("location").precision(precision))
.get();
SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
geohashGrid("geohashgrid").field("location").precision(precision)
).get();

assertNoFailures(response);

@ -241,9 +238,9 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void testTopMatch() throws Exception {
for (int precision = 1; precision <= PRECISION; precision++) {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision))
.get();
SearchResponse response = prepareSearch("idx").addAggregation(
geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision)
).get();

assertNoFailures(response);

@ -270,9 +267,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
final int shardSize = 10000;
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
.get()
() -> prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)).get()
);
assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [geohashgrid]"));
}

@ -282,9 +277,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
final int shardSize = 0;
IllegalArgumentException exception = expectThrows(
IllegalArgumentException.class,
() -> client().prepareSearch("idx")
.addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize))
.get()
() -> prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)).get()
);
assertThat(exception.getMessage(), containsString("[shardSize] must be greater than 0. Found [0] in [geohashgrid]"));
}

Some files were not shown because too many files have changed in this diff.