mirror of https://github.com/elastic/elasticsearch.git (synced 2025-04-25 07:37:19 -04:00)
Profile the fetch phase (#77064)
This adds profiling to the fetch phase so we can tell when fetching is slower
than we'd like and which portion of the fetch is slow. The output includes
which stored fields were loaded, how long it took to load stored fields, which
fetch sub-phases were run, and how long those fetch sub-phases took.

Closes #75892

* Skip bwc
* Don't compare fetch profiles
* Use passed one
* no npe
* Do last rename
* Move method down
* serialization tests
* Fix sneaky serialization
* Test for sneaky bug
* license header
* Document
* Fix test
* newline
* Restore assertion
* unit test merging
* Handle inner hits
* Fixup
* Revert unneeded
* Revert inner hits profiling
* Fix names
* Fixup names
* Move results building
* Drop loaded_nested
* Checkstyle
* Fixup more
* Finish writeable cleanup
  Add unit tests for merge
* Remove null checking builder
* Fix wire mistake
  How did this pass before?!
* Rename
* Remove funny builder
* Remove name munging
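Concretely, profiling is a one-flag change on the request; with this commit the
per-shard profile in the response then carries a `fetch` section alongside
`searches` and `aggregations`. A minimal sketch (not part of the commit; it
assumes an existing `RestHighLevelClient` named `client`, and borrows the index
and query from the docs changes below):

[source,java]
----
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Build a profiled search; the response's profile will include fetch timings.
SearchSourceBuilder source = new SearchSourceBuilder()
    .profile(true)
    .query(QueryBuilders.termQuery("user.id", "elkbee"));
SearchRequest request = new SearchRequest("my-index-000001").source(source);
SearchResponse response = client.search(request, RequestOptions.DEFAULT);
----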
parent 97f42aabbb
commit c2c0165fd2

54 changed files with 1864 additions and 543 deletions
@@ -48,9 +48,9 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.Fuzziness;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.index.query.MatchQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -89,7 +89,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
 import org.elasticsearch.search.profile.ProfileResult;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
 import org.elasticsearch.search.profile.query.CollectorResult;
 import org.elasticsearch.search.profile.query.QueryProfileShardResult;
@@ -499,15 +499,15 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
 
         SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
         // tag::search-request-profiling-get
-        Map<String, SearchProfileQueryPhaseResult> profilingResults =
+        Map<String, SearchProfileShardResult> profilingResults =
                 searchResponse.getProfileResults(); // <1>
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> profilingResult : profilingResults.entrySet()) { // <2>
+        for (Map.Entry<String, SearchProfileShardResult> profilingResult : profilingResults.entrySet()) { // <2>
             String key = profilingResult.getKey(); // <3>
-            SearchProfileQueryPhaseResult profileShardResult = profilingResult.getValue(); // <4>
+            SearchProfileShardResult profileShardResult = profilingResult.getValue(); // <4>
         }
         // end::search-request-profiling-get
 
-        SearchProfileQueryPhaseResult profileShardResult = profilingResults.values().iterator().next();
+        SearchProfileShardResult profileShardResult = profilingResults.values().iterator().next();
         assertNotNull(profileShardResult);
 
         // tag::search-request-profiling-queries
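After the rename above, callers iterate `SearchProfileShardResult` values. A
hedged sketch of pulling the new fetch timing out of a response; it assumes
`getFetchPhase()` returns a nullable `ProfileResult`, which the diff only
partially confirms (the CCS test below checks it for null):

[source,java]
----
import java.util.Map;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.SearchProfileShardResult;

// Sketch: print per-shard fetch time; getFetchPhase() is null when the
// shard ran no fetch (its exact return type is an assumption here).
static void printFetchTimes(SearchResponse searchResponse) {
    Map<String, SearchProfileShardResult> profilingResults = searchResponse.getProfileResults();
    for (Map.Entry<String, SearchProfileShardResult> entry : profilingResults.entrySet()) {
        ProfileResult fetch = entry.getValue().getFetchPhase();
        if (fetch != null) {
            System.out.println(entry.getKey() + " fetch took " + fetch.getTime() + "ns");
        }
    }
}
----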
@@ -163,7 +163,37 @@ The API returns the following result:
           ]
         }
       ],
-      "aggregations": []
+      "aggregations": [],
+      "fetch": {
+        "type": "fetch",
+        "description": "",
+        "time_in_nanos": 660555,
+        "breakdown": {
+          "next_reader": 7292,
+          "next_reader_count": 1,
+          "load_stored_fields": 299325,
+          "load_stored_fields_count": 5
+        },
+        "debug": {
+          "stored_fields": ["_id", "_routing", "_source"]
+        },
+        "children": [
+          {
+            "type": "FetchSourcePhase",
+            "description": "",
+            "time_in_nanos": 20443,
+            "breakdown": {
+              "next_reader": 745,
+              "next_reader_count": 1,
+              "process": 19698,
+              "process_count": 5
+            },
+            "debug": {
+              "fast_path": 5
+            }
+          }
+        ]
+      }
     }
   ]
 }
@@ -196,7 +226,8 @@ The overall structure of the profile response is as follows:
          "collector": [...] <4>
       }
     ],
-    "aggregations": [...] <5>
+    "aggregations": [...], <5>
+    "fetch": {...} <6>
   }
 ]
 }
@@ -208,15 +239,14 @@ The overall structure of the profile response is as follows:
 // TESTRESPONSE[s/"query": \[...\]/"query": $body.$_path/]
 // TESTRESPONSE[s/"collector": \[...\]/"collector": $body.$_path/]
 // TESTRESPONSE[s/"aggregations": \[...\]/"aggregations": []/]
+// TESTRESPONSE[s/"fetch": \{...\}/"fetch": $body.$_path/]
 <1> A profile is returned for each shard that participated in the response, and
 is identified by a unique ID.
-<2> Each profile contains a section which holds details about the query
-execution.
-<3> Each profile has a single time representing the cumulative rewrite time.
-<4> Each profile also contains a section about the Lucene Collectors which run
-the search.
-<5> Each profile contains a section which holds the details about the
-aggregation execution.
+<2> Query timings and other debugging information.
+<3> The cumulative rewrite time.
+<4> Names and invocation timings for each collector.
+<5> Aggregation timings, invocation counts, and debug information.
+<6> Fetch timing and debug information.
 
 Because a search request may be executed against one or more shards in an index,
 and a search may cover one or more indices, the top level element in the profile
@@ -295,7 +325,7 @@ Using our previous `match` query example, let's analyze the `query` section:
 ]
 --------------------------------------------------
 // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n/]
-// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/]
+// TESTRESPONSE[s/]$/],"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/]
 // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/]
 // TESTRESPONSE[s/"breakdown": \{...\}/"breakdown": $body.$_path/]
 <1> The breakdown timings are omitted for simplicity.
@@ -347,7 +377,7 @@ Lucene execution:
 }
 --------------------------------------------------
 // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:get message:search",\n"time_in_nanos": $body.$_path,/]
-// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": []}]}}/]
+// TESTRESPONSE[s/}$/},\n"children": $body.$_path}],\n"rewrite_time": $body.$_path, "collector": $body.$_path}], "aggregations": [], "fetch": $body.$_path}]}}/]
 // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/]
 
 Timings are listed in wall-clock nanoseconds and are not normalized at all. All
@@ -448,7 +478,7 @@ Looking at the previous example:
 ]
 --------------------------------------------------
 // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": $body.$_path,\n"rewrite_time": $body.$_path,/]
-// TESTRESPONSE[s/]$/]}], "aggregations": []}]}}/]
+// TESTRESPONSE[s/]$/]}], "aggregations": [], "fetch": $body.$_path}]}}/]
 // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/]
 
 
@@ -569,7 +599,7 @@ GET /my-index-000001/_search
 }
 --------------------------------------------------
 // TEST[setup:my_index]
-// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations/]
+// TEST[s/_search/_search\?filter_path=profile.shards.id,profile.shards.searches,profile.shards.aggregations,profile.shards.fetch/]
 
 
 This example has:
@@ -673,13 +703,15 @@ The API returns the following result:
           ]
         }
       ],
-      "aggregations": [...] <1>
+      "aggregations": [...], <1>
+      "fetch": {...}
     }
   ]
 }
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"aggregations": \[\.\.\.\]/"aggregations": $body.$_path/]
+// TESTRESPONSE[s/"fetch": \{\.\.\.\}/"fetch": $body.$_path/]
 // TESTRESPONSE[s/\.\.\.//]
 // TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/]
 // TESTRESPONSE[s/"id": "\[P6-vulHtQRWuD4YnubWb7A\]\[my-index-000001\]\[0\]"/"id": $body.profile.shards.0.id/]
@@ -918,6 +950,99 @@ to give you a feel for A) what machinery in {es} is actually eating time, and B)
 the magnitude of differences in times between the various components. Like the
 overall time, the breakdown is inclusive of all children times.
 
+[[profiling-fetch]]
+===== Profiling Fetch
+
+
+All shards that fetched documents will have a `fetch` section in the profile.
+Let's execute a small search and have a look at the fetch profile:
+
+[source,console]
+----
+GET /my-index-000001/_search?filter_path=profile.shards.fetch
+{
+  "profile": true,
+  "query": {
+    "term": {
+      "user.id": {
+        "value": "elkbee"
+      }
+    }
+  }
+}
+----
+// TEST[continued]
+
+And here is the fetch profile:
+
+[source,console-result]
+----
+{
+  "profile": {
+    "shards": [
+      {
+        "fetch": {
+          "type": "fetch",
+          "description": "",
+          "time_in_nanos": 660555,
+          "breakdown": {
+            "next_reader": 7292,
+            "next_reader_count": 1,
+            "load_stored_fields": 299325,
+            "load_stored_fields_count": 5
+          },
+          "debug": {
+            "stored_fields": ["_id", "_routing", "_source"]
+          },
+          "children": [
+            {
+              "type": "FetchSourcePhase",
+              "description": "",
+              "time_in_nanos": 20443,
+              "breakdown": {
+                "next_reader": 745,
+                "next_reader_count": 1,
+                "process": 19698,
+                "process_count": 5
+              },
+              "debug": {
+                "fast_path": 4
+              }
+            }
+          ]
+        }
+      }
+    ]
+  }
+}
+----
+// TESTRESPONSE[s/(?<=[" ])\d+(\.\d+)?/$body.$_path/]
+
+Since this is debugging information about the way that Elasticsearch executes
+the fetch, it can change from request to request and version to version. Even
+patch versions may change the output here. That lack of consistency is what
+makes it useful for debugging.
+
+Anyway! `time_in_nanos` measures the total time of the fetch phase.
+The `breakdown` counts and times our
+per-link:{glossary}/terms.html#glossary-segment[segment] preparation in
+`next_reader` and the time taken loading stored fields in `load_stored_fields`.
+`debug` contains miscellaneous non-timing information; specifically,
+`stored_fields` lists the stored fields that fetch will have to load. If it is
+an empty list then fetch will entirely skip loading stored fields.
+
+The `children` section lists the sub-phases that do the actual fetching work,
+and the `breakdown` has counts and timings for the
+per-link:{glossary}/terms.html#glossary-segment[segment] preparation in
+`next_reader` and the per-document fetching in `process`.
+
+NOTE: We try hard to load all of the stored fields that we will need for the
+fetch up front. This tends to make the `_source` phase a couple of microseconds
+per hit. In that case the true cost of the `_source` phase is hidden in the
+`load_stored_fields` component of the breakdown. It's possible to entirely skip
+loading stored fields by setting
+`"_source": false, "stored_fields": ["_none_"]`.
+
 [[profiling-considerations]]
 ===== Profiling Considerations
 
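For client-side readers, the skip described in the NOTE translates to roughly
this sketch (builder methods that exist on `SearchSourceBuilder`; the index and
query are borrowed from the docs example above):

[source,java]
----
import java.util.List;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

// Equivalent of "_source": false, "stored_fields": ["_none_"]: fetch should
// then skip loading stored fields entirely, so the load_stored_fields time
// drops out of the profile breakdown.
SearchSourceBuilder source = new SearchSourceBuilder()
    .profile(true)
    .fetchSource(false)
    .storedFields(List.of("_none_"))
    .query(QueryBuilders.termQuery("user.id", "elkbee"));
SearchRequest request = new SearchRequest("my-index-000001").source(source);
----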
@@ -936,16 +1061,13 @@ have a drastic effect compared to other components in the profiled query.
 [[profile-limitations]]
 ===== Limitations
 
-- Profiling currently does not measure the search fetch phase nor the network
-overhead.
+- Profiling currently does not measure the network overhead.
 - Profiling also does not account for time spent in the queue, merging shard
 responses on the coordinating node, or additional work such as building global
 ordinals (an internal data structure used to speed up search).
 - Profiling statistics are currently not available for suggestions,
 highlighting, `dfs_query_then_fetch`.
 - Profiling of the reduce phase of aggregation is currently not available.
-- The Profiler is still highly experimental. The Profiler is instrumenting parts
-of Lucene that were never designed to be exposed in this manner, and so all
-results should be viewed as a best effort to provide detailed diagnostics. We
-hope to improve this over time. If you find obviously wrong numbers, strange
-query structures, or other bugs, please report them!
+- The Profiler is instrumenting internals that can change from version to
+version. The resulting JSON should be considered mostly unstable, especially
+things in the `debug` section.
@@ -116,3 +116,39 @@ teardown:
           query:
             match_all: {}
           inner_hits: {}
+
+---
+profile fetch:
+  - skip:
+      version: ' - 7.99.99'
+      reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0
+
+  - do:
+      search:
+        index: test
+        body:
+          profile: true
+          query:
+            has_parent:
+              parent_type: question
+              query:
+                match_all: {}
+              inner_hits: {}
+
+  - gt: { profile.shards.0.fetch.time_in_nanos: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 }
+  - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] }
+  - length: { profile.shards.0.fetch.children: 2 }
+  - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - match: { profile.shards.0.fetch.children.1.type: InnerHitsPhase }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 }
@@ -97,10 +97,13 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Consumer;
 
+import static java.util.stream.Collectors.toList;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.not;
 
 /**
  * This test class executes twice, first against the remote cluster, and then against another cluster that has the remote cluster
@@ -405,6 +408,10 @@ public class CCSDuelIT extends ESRestTestCase {
         duelSearch(searchRequest, response -> {
             assertHits(response);
             assertFalse(response.getProfileResults().isEmpty());
+            assertThat(
+                response.getProfileResults().values().stream().filter(sr -> sr.getFetchPhase() != null).collect(toList()),
+                not(empty())
+            );
         });
     }
 
@@ -813,6 +820,14 @@ public class CCSDuelIT extends ESRestTestCase {
         List<Map<String, Object>> shards = (List <Map<String, Object>>)profile.get("shards");
         for (Map<String, Object> shard : shards) {
             replaceProfileTime(shard);
+            /*
+             * The way we try to reduce round trips is by fetching all
+             * of the results we could possibly need from the remote
+             * cluster and then merging *those* together locally. This
+             * will end up fetching more documents total. So we can't
+             * really compare the fetch profiles here.
+             */
+            shard.remove("fetch");
         }
     }
     return responseMap;
@@ -0,0 +1,146 @@
+---
+setup:
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            index.number_of_shards: 1
+          mappings:
+            properties:
+              keyword:
+                type: keyword
+
+  - do:
+      index:
+        index: test
+        id: 1
+        refresh: true
+        body:
+          keyword: [ "a", "b" ]
+
+---
+fetch fields:
+  - skip:
+      version: ' - 7.99.99'
+      reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0
+
+  - do:
+      search:
+        index: test
+        body:
+          _source: false
+          profile: true
+          fields: [keyword]
+
+  - is_true: hits.hits.0._id
+  - match: { hits.hits.0.fields.keyword.0: a }
+  - match: { hits.hits.0.fields.keyword.1: b }
+  - gt: { profile.shards.0.fetch.time_in_nanos: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 }
+  - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] }
+  - length: { profile.shards.0.fetch.children: 1 }
+  - match: { profile.shards.0.fetch.children.0.type: FetchFieldsPhase }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+
+---
+fetch source:
+  - skip:
+      version: ' - 7.99.99'
+      reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0
+
+  - do:
+      search:
+        index: test
+        body:
+          profile: true
+
+  - is_true: hits.hits.0._id
+  - match: { hits.hits.0._source.keyword.0: a }
+  - match: { hits.hits.0._source.keyword.1: b }
+  - gt: { profile.shards.0.fetch.time_in_nanos: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 }
+  - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] }
+  - length: { profile.shards.0.fetch.children: 1 }
+  - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - match: { profile.shards.0.fetch.children.0.debug.fast_path: 1 }
+
+---
+fetch nested source:
+  - skip:
+      version: ' - 7.99.99'
+      reason: fetch profiling implemented in 8.0.0 to be backported to 7.16.0
+
+  - do:
+      indices.create:
+        index: test_nested
+        body:
+          settings:
+            index.number_of_shards: 1
+          mappings:
+            properties:
+              keyword:
+                type: keyword
+              nested:
+                type: nested
+                properties:
+                  text:
+                    type: text
+
+  - do:
+      index:
+        index: test_nested
+        id: 1
+        refresh: true
+        body:
+          keyword: [ "a", "b" ]
+          nested:
+            - text: the quick brown fox
+            - text: jumped over the
+            - text: lazy dog
+
+  - do:
+      search:
+        index: test_nested
+        body:
+          profile: true
+          query:
+            nested:
+              path: nested
+              query:
+                match_all: {}
+              inner_hits: {}
+
+  - is_true: hits.hits.0._id
+  - match: { hits.hits.0._source.keyword.0: a }
+  - match: { hits.hits.0._source.keyword.1: b }
+  - gt: { profile.shards.0.fetch.time_in_nanos: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields_count: 0 }
+  - gt: { profile.shards.0.fetch.breakdown.load_stored_fields: 0 }
+  - match: { profile.shards.0.fetch.debug.stored_fields: [_id, _routing, _source] }
+  - length: { profile.shards.0.fetch.children: 2 }
+  - match: { profile.shards.0.fetch.children.0.type: FetchSourcePhase }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.0.breakdown.next_reader: 0 }
+  - match: { profile.shards.0.fetch.children.1.type: InnerHitsPhase }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader_count: 0 }
+  - gt: { profile.shards.0.fetch.children.1.breakdown.next_reader: 0 }
@@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedOrdinalsS
 import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator;
 import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
 import org.elasticsearch.search.profile.ProfileResult;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.joda.time.Instant;
 
@@ -119,10 +119,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
         SearchResponse response = client().prepareSearch("idx").setProfile(true)
             .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)).get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -164,10 +164,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             )
         ).get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -247,10 +247,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .collectMode(SubAggCollectionMode.BREADTH_FIRST).field(TAG_FIELD).subAggregation(avg("avg").field(NUMBER_FIELD))))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -317,10 +317,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .subAggregation(max("max").field(NUMBER_FIELD)))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -377,10 +377,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .subAggregation(max("max").field(NUMBER_FIELD)))))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -581,7 +581,7 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .subAggregation(max("max").field(NUMBER_FIELD)))))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(0));
     }
@@ -611,10 +611,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .subAggregation(new MaxAggregationBuilder("m").field("date")))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -698,10 +698,10 @@ public class AggregationProfilerIT extends ESIntegTestCase {
             .addAggregation(new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH))
             .get();
         assertSearchResponse(response);
-        Map<String, SearchProfileQueryPhaseResult> profileResults = response.getProfileResults();
+        Map<String, SearchProfileShardResult> profileResults = response.getProfileResults();
         assertThat(profileResults, notNullValue());
         assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries));
-        for (SearchProfileQueryPhaseResult profileShardResult : profileResults.values()) {
+        for (SearchProfileShardResult profileShardResult : profileResults.values()) {
             assertThat(profileShardResult, notNullValue());
             AggregationProfileShardResult aggProfileResults = profileShardResult.getAggregationProfileResults();
             assertThat(aggProfileResults, notNullValue());
@@ -20,7 +20,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.profile.ProfileResult;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.test.ESIntegTestCase;
 
@@ -77,7 +77,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
 
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shard : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shard : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shard.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -210,11 +210,11 @@ public class QueryProfilerIT extends ESIntegTestCase {
             .setSearchType(SearchType.QUERY_THEN_FETCH)
             .get();
 
-        Map<String, SearchProfileQueryPhaseResult> p = resp.getProfileResults();
+        Map<String, SearchProfileShardResult> p = resp.getProfileResults();
         assertNotNull(p);
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertEquals(result.getQueryName(), "TermQuery");
@@ -257,11 +257,11 @@ public class QueryProfilerIT extends ESIntegTestCase {
             .setSearchType(SearchType.QUERY_THEN_FETCH)
             .get();
 
-        Map<String, SearchProfileQueryPhaseResult> p = resp.getProfileResults();
+        Map<String, SearchProfileShardResult> p = resp.getProfileResults();
         assertNotNull(p);
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertEquals(result.getQueryName(), "BooleanQuery");
@@ -329,7 +329,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -381,7 +381,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -428,7 +428,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -475,7 +475,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -521,7 +521,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@@ -575,7 +575,7 @@ public class QueryProfilerIT extends ESIntegTestCase {
         assertNotNull("Profile response element should not be null", resp.getProfileResults());
         assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0));
 
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> shardResult : resp.getProfileResults().entrySet()) {
+        for (Map.Entry<String, SearchProfileShardResult> shardResult : resp.getProfileResults().entrySet()) {
             for (QueryProfileShardResult searchProfiles : shardResult.getValue().getQueryProfileResults()) {
                 for (ProfileResult result : searchProfiles.getQueryResults()) {
                     assertNotNull(result.getQueryName());
@ -42,6 +42,7 @@ import org.elasticsearch.search.internal.InternalSearchResponse;
|
||||||
import org.elasticsearch.search.internal.SearchContext;
|
import org.elasticsearch.search.internal.SearchContext;
|
||||||
import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
|
import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
|
||||||
 import org.elasticsearch.search.profile.SearchProfileResults;
+import org.elasticsearch.search.profile.SearchProfileResultsBuilder;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.Suggest.Suggestion;

@@ -290,7 +291,7 @@ public final class SearchPhaseController {
             assert currentOffset == sortedDocs.length : "expected no more score doc slices";
         }
     }
-    return reducedQueryPhase.buildResponse(hits);
+    return reducedQueryPhase.buildResponse(hits, fetchResults);
 }

 private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom,

@@ -401,8 +402,22 @@ public final class SearchPhaseController {
     numReducePhases++; // increment for this phase
     if (queryResults.isEmpty()) { // early terminate we have nothing to reduce
         final TotalHits totalHits = topDocsStats.getTotalHits();
-        return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(),
-            false, null, null, null, null, SortedTopDocs.EMPTY, null, numReducePhases, 0, 0, true);
+        return new ReducedQueryPhase(
+            totalHits,
+            topDocsStats.fetchHits,
+            topDocsStats.getMaxScore(),
+            false,
+            null,
+            null,
+            null,
+            null,
+            SortedTopDocs.EMPTY,
+            null,
+            numReducePhases,
+            0,
+            0,
+            true
+        );
     }
     int total = queryResults.size();
     queryResults = queryResults.stream()

@@ -419,7 +434,8 @@ public final class SearchPhaseController {

     // count the total (we use the query result provider here, since we might not get any hits (we scrolled past them))
     final Map<String, List<Suggestion<?>>> groupedSuggestions = hasSuggest ? new HashMap<>() : Collections.emptyMap();
-    final Map<String, SearchProfileQueryPhaseResult> profileResults = hasProfileResults ? new HashMap<>(queryResults.size())
+    final Map<String, SearchProfileQueryPhaseResult> profileShardResults = hasProfileResults
+        ? new HashMap<>(queryResults.size())
         : Collections.emptyMap();
     int from = 0;
     int size = 0;

@@ -449,7 +465,7 @@ public final class SearchPhaseController {
         }
         if (hasProfileResults) {
             String key = result.getSearchShardTarget().toString();
-            profileResults.put(key, result.consumeProfileResult());
+            profileShardResults.put(key, result.consumeProfileResult());
         }
     }
     final Suggest reducedSuggest;

@@ -462,11 +478,13 @@ public final class SearchPhaseController {
         reducedCompletionSuggestions = reducedSuggest.filter(CompletionSuggestion.class);
     }
     final InternalAggregations aggregations = reduceAggs(aggReduceContextBuilder, performFinalReduce, bufferedAggs);
-    final SearchProfileResults shardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults);
+    final SearchProfileResultsBuilder profileBuilder = profileShardResults.isEmpty()
+        ? null
+        : new SearchProfileResultsBuilder(profileShardResults);
     final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, bufferedTopDocs, from, size, reducedCompletionSuggestions);
     final TotalHits totalHits = topDocsStats.getTotalHits();
     return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(),
-        topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, shardResults, sortedTopDocs,
+        topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, profileBuilder, sortedTopDocs,
         sortValueFormats, numReducePhases, size, from, false);
 }

@@ -535,7 +553,7 @@ public final class SearchPhaseController {
     // the reduced internal aggregations
     final InternalAggregations aggregations;
     // the reduced profile results
-    final SearchProfileResults shardResults;
+    final SearchProfileResultsBuilder profileBuilder;
     // the number of reduces phases
     final int numReducePhases;
     //encloses info about the merged top docs, the sort fields used to sort the score docs etc.

@@ -549,9 +567,22 @@ public final class SearchPhaseController {
     // sort value formats used to sort / format the result
     final DocValueFormat[] sortValueFormats;

-    ReducedQueryPhase(TotalHits totalHits, long fetchHits, float maxScore, boolean timedOut, Boolean terminatedEarly, Suggest suggest,
-                      InternalAggregations aggregations, SearchProfileResults shardResults, SortedTopDocs sortedTopDocs,
-                      DocValueFormat[] sortValueFormats, int numReducePhases, int size, int from, boolean isEmptyResult) {
+    ReducedQueryPhase(
+        TotalHits totalHits,
+        long fetchHits,
+        float maxScore,
+        boolean timedOut,
+        Boolean terminatedEarly,
+        Suggest suggest,
+        InternalAggregations aggregations,
+        SearchProfileResultsBuilder profileBuilder,
+        SortedTopDocs sortedTopDocs,
+        DocValueFormat[] sortValueFormats,
+        int numReducePhases,
+        int size,
+        int from,
+        boolean isEmptyResult
+    ) {
         if (numReducePhases <= 0) {
             throw new IllegalArgumentException("at least one reduce phase must have been applied but was: " + numReducePhases);
         }

@@ -562,7 +593,7 @@ public final class SearchPhaseController {
     this.terminatedEarly = terminatedEarly;
     this.suggest = suggest;
     this.aggregations = aggregations;
-    this.shardResults = shardResults;
+    this.profileBuilder = profileBuilder;
     this.numReducePhases = numReducePhases;
     this.sortedTopDocs = sortedTopDocs;
     this.size = size;

@@ -575,8 +606,28 @@ public final class SearchPhaseController {
      * Creates a new search response from the given merged hits.
      * @see #merge(boolean, ReducedQueryPhase, Collection, IntFunction)
      */
-    public InternalSearchResponse buildResponse(SearchHits hits) {
-        return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly, numReducePhases);
+    public InternalSearchResponse buildResponse(SearchHits hits, Collection<? extends SearchPhaseResult> fetchResults) {
+        return new InternalSearchResponse(
+            hits,
+            aggregations,
+            suggest,
+            buildSearchProfileResults(fetchResults),
+            timedOut,
+            terminatedEarly,
+            numReducePhases
+        );
+    }
+
+    private SearchProfileResults buildSearchProfileResults(Collection<? extends SearchPhaseResult> fetchResults) {
+        if (profileBuilder == null) {
+            assert fetchResults.stream()
+                .map(SearchPhaseResult::fetchResult)
+                .filter(r -> r != null)
+                .allMatch(r -> r.profileResult() == null) : "found fetch profile without search profile";
+            return null;
+        }
+        return profileBuilder.build(fetchResults);
     }
 }
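The thread running through the `SearchPhaseController` hunks above: the reduce step used to materialize `SearchProfileResults` directly, but fetch profiles do not exist yet at reduce time, so the reduce now hands back a `SearchProfileResultsBuilder` and only `buildResponse(hits, fetchResults)` performs the merge once the fetch results are in hand. A minimal, self-contained sketch of that shape using only JDK types (all names below are invented for illustration, this is not the Elasticsearch API):

```java
import java.util.List;
import java.util.function.Function;

final class DeferredProfileSketch {
    record Profile(List<String> queryParts, List<String> fetchParts) {}

    // The "reduce" captures the query-phase data it already has and defers the rest.
    static Function<List<String>, Profile> reduce(List<String> queryParts) {
        return fetchParts -> new Profile(List.copyOf(queryParts), List.copyOf(fetchParts));
    }

    public static void main(String[] args) {
        Function<List<String>, Profile> builder = reduce(List.of("shard-0 query profile"));
        // ... fetch phase runs here ...
        Profile merged = builder.apply(List.of("shard-0 fetch profile"));
        System.out.println(merged);
    }
}
```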
@@ -11,18 +11,18 @@ package org.elasticsearch.action.search;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.xcontent.StatusToXContentObject;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.rest.action.RestActions;
 import org.elasticsearch.search.SearchHit;

@@ -30,8 +30,8 @@ import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.internal.InternalSearchResponse;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;

 import java.io.IOException;

@@ -225,7 +225,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb
      * @return The profile results or an empty map
      */
     @Nullable
-    public Map<String, SearchProfileQueryPhaseResult> getProfileResults() {
+    public Map<String, SearchProfileShardResult> getProfileResults() {
         return internalResponse.profile();
     }
@@ -27,8 +27,8 @@ import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.internal.InternalSearchResponse;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;

@@ -111,7 +111,7 @@ final class SearchResponseMerger {
     //the current reduce phase counts as one
     int numReducePhases = 1;
     List<ShardSearchFailure> failures = new ArrayList<>();
-    Map<String, SearchProfileQueryPhaseResult> profileResults = new HashMap<>();
+    Map<String, SearchProfileShardResult> profileResults = new HashMap<>();
     List<InternalAggregations> aggs = new ArrayList<>();
     Map<ShardIdAndClusterAlias, Integer> shards = new TreeMap<>();
     List<TopDocs> topDocsList = new ArrayList<>(searchResponses.size());
@@ -13,8 +13,8 @@ import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.Aggregations;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;

 import java.io.IOException;

@@ -39,8 +39,15 @@ public class SearchResponseSections implements ToXContentFragment {
     protected final Boolean terminatedEarly;
     protected final int numReducePhases;

-    public SearchResponseSections(SearchHits hits, Aggregations aggregations, Suggest suggest, boolean timedOut, Boolean terminatedEarly,
-                                  SearchProfileResults profileResults, int numReducePhases) {
+    public SearchResponseSections(
+        SearchHits hits,
+        Aggregations aggregations,
+        Suggest suggest,
+        boolean timedOut,
+        Boolean terminatedEarly,
+        SearchProfileResults profileResults,
+        int numReducePhases
+    ) {
         this.hits = hits;
         this.aggregations = aggregations;
         this.suggest = suggest;

@@ -83,7 +90,7 @@ public class SearchResponseSections implements ToXContentFragment {
      *
      * @return Profile results
      */
-    public final Map<String, SearchProfileQueryPhaseResult> profile() {
+    public final Map<String, SearchProfileShardResult> profile() {
         if (profileResults == null) {
             return Collections.emptyMap();
         }
@@ -28,7 +28,6 @@ import org.elasticsearch.cluster.routing.OperationRouting;
 import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.core.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.inject.Inject;

@@ -36,9 +35,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.common.util.concurrent.CountDown;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.query.Rewriteable;
 import org.elasticsearch.index.shard.ShardId;

@@ -53,8 +53,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.tasks.TaskId;
 import org.elasticsearch.threadpool.ThreadPool;

@@ -366,9 +366,10 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
     remoteClusterClient.search(ccsSearchRequest, new ActionListener<SearchResponse>() {
         @Override
         public void onResponse(SearchResponse searchResponse) {
-            Map<String, SearchProfileQueryPhaseResult> profileResults = searchResponse.getProfileResults();
+            Map<String, SearchProfileShardResult> profileResults = searchResponse.getProfileResults();
             SearchProfileResults profile = profileResults == null || profileResults.isEmpty()
-                ? null : new SearchProfileResults(profileResults);
+                ? null
+                : new SearchProfileResults(profileResults);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchResponse.getHits(),
                 (InternalAggregations) searchResponse.getAggregations(), searchResponse.getSuggest(), profile,
                 searchResponse.isTimedOut(), searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases());
@@ -40,11 +40,16 @@ import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.internal.SubSearchContext;
+import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.sort.SortAndFormats;

 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.function.BiConsumer;

 class TopHitsAggregator extends MetricsAggregator {

@@ -62,6 +67,7 @@ class TopHitsAggregator extends MetricsAggregator {

     private final SubSearchContext subSearchContext;
     private final LongObjectPagedHashMap<Collectors> topDocsCollectors;
+    private final List<ProfileResult> fetchProfiles;

     TopHitsAggregator(
         SubSearchContext subSearchContext,

@@ -73,6 +79,7 @@ class TopHitsAggregator extends MetricsAggregator {
         super(name, context, parent, metadata);
         topDocsCollectors = new LongObjectPagedHashMap<>(1, context.bigArrays());
         this.subSearchContext = subSearchContext;
+        fetchProfiles = context.profiling() ? new ArrayList<>() : null;
     }

     @Override

@@ -183,6 +190,9 @@ class TopHitsAggregator extends MetricsAggregator {
         subSearchContext.docIdsToLoad(docIdsToLoad, docIdsToLoad.length);
         subSearchContext.fetchPhase().execute(subSearchContext);
         FetchSearchResult fetchResult = subSearchContext.fetchResult();
+        if (fetchProfiles != null) {
+            fetchProfiles.add(fetchResult.profileResult());
+        }
         SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
         for (int i = 0; i < internalHits.length; i++) {
             ScoreDoc scoreDoc = topDocs.scoreDocs[i];

@@ -226,6 +236,19 @@ class TopHitsAggregator extends MetricsAggregator {
         );
     }

+    @Override
+    public void collectDebugInfo(BiConsumer<String, Object> add) {
+        super.collectDebugInfo(add);
+        List<Map<String, Object>> debug = new ArrayList<>();
+        for (ProfileResult result : fetchProfiles) {
+            Map<String, Object> resultDebug = new HashMap<>();
+            resultDebug.put("time", result.getTime());
+            resultDebug.put("breakdown", result.getTimeBreakdown());
+            debug.add(resultDebug);
+        }
+        add.accept("fetch_profile", debug);
+    }
+
     @Override
     protected void doClose() {
         Releasables.close(topDocsCollectors);
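For orientation, the `collectDebugInfo` override above publishes one entry per executed fetch under the `"fetch_profile"` key of the aggregation's profile debug output. A synthetic sketch of that shape (the numbers are invented, and the exact breakdown keys come from `AbstractProfileBreakdown`, which this diff does not show):

```java
import java.util.List;
import java.util.Map;

final class FetchProfileDebugShape {
    public static void main(String[] args) {
        Map<String, Object> entry = Map.of(
            "time", 1_250_000L,          // ProfileResult.getTime(): nanos spent in one fetch
            "breakdown", Map.of(         // ProfileResult.getTimeBreakdown()
                "next_reader", 40_000L,
                "load_stored_fields", 900_000L
            )
        );
        // TopHitsAggregator publishes the list via add.accept("fetch_profile", debug)
        System.out.println(List.of(entry));
    }
}
```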
@@ -14,12 +14,12 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.common.CheckedBiConsumer;
-import org.elasticsearch.core.Tuple;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
 import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
 import org.elasticsearch.index.mapper.MappedFieldType;

@@ -37,6 +37,7 @@ import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
 import org.elasticsearch.search.fetch.subphase.InnerHitsPhase;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SourceLookup;
+import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.tasks.TaskCancelledException;

 import java.io.IOException;

@@ -78,11 +79,26 @@ public class FetchPhase {

         if (context.docIdsToLoadSize() == 0) {
             // no individual hits to process, so we shortcut
-            context.fetchResult().hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(),
-                context.queryResult().getMaxScore()));
+            SearchHits hits = new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(), context.queryResult().getMaxScore());
+            context.fetchResult().shardResult(hits, null);
             return;
         }

+        Profiler profiler = context.getProfilers() == null ? Profiler.NOOP : context.getProfilers().startProfilingFetchPhase();
+        SearchHits hits = null;
+        try {
+            hits = buildSearchHits(context, profiler);
+        } finally {
+            // Always finish profiling
+            ProfileResult profileResult = profiler.finish();
+            // Only set the shardResults if building search hits was successful
+            if (hits != null) {
+                context.fetchResult().shardResult(hits, profileResult);
+            }
+        }
+    }
+
+    private SearchHits buildSearchHits(SearchContext context, Profiler profiler) {
         DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()];
         for (int index = 0; index < context.docIdsToLoadSize(); index++) {
             docs[index] = new DocIdToIndex(context.docIdsToLoad()[index], index);

@@ -92,12 +108,13 @@ public class FetchPhase {

         Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
         FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);
+        profiler.visitor(fieldsVisitor);

         FetchContext fetchContext = new FetchContext(context);

         SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];

-        List<FetchSubPhaseProcessor> processors = getProcessors(context.shardTarget(), fetchContext);
+        List<FetchSubPhaseProcessor> processors = getProcessors(context.shardTarget(), fetchContext, profiler);
         NestedDocuments nestedDocuments = context.getSearchExecutionContext().getNestedDocuments();

         int currentReaderIndex = -1;

@@ -113,27 +130,33 @@ public class FetchPhase {
             try {
                 int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
                 if (currentReaderIndex != readerIndex) {
-                    currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
-                    currentReaderIndex = readerIndex;
-                    if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader
-                        && hasSequentialDocs && docs.length >= 10) {
-                        // All the docs to fetch are adjacent but Lucene stored fields are optimized
-                        // for random access and don't optimize for sequential access - except for merging.
-                        // So we do a little hack here and pretend we're going to do merges in order to
-                        // get better sequential access.
-                        SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader();
-                        fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument;
-                    } else {
-                        fieldReader = currentReaderContext.reader()::document;
-                    }
-                    for (FetchSubPhaseProcessor processor : processors) {
-                        processor.setNextReader(currentReaderContext);
-                    }
-                    leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext);
+                    profiler.startNextReader();
+                    try {
+                        currentReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
+                        currentReaderIndex = readerIndex;
+                        if (currentReaderContext.reader() instanceof SequentialStoredFieldsLeafReader
+                            && hasSequentialDocs && docs.length >= 10) {
+                            // All the docs to fetch are adjacent but Lucene stored fields are optimized
+                            // for random access and don't optimize for sequential access - except for merging.
+                            // So we do a little hack here and pretend we're going to do merges in order to
+                            // get better sequential access.
+                            SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) currentReaderContext.reader();
+                            fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument;
+                        } else {
+                            fieldReader = currentReaderContext.reader()::document;
+                        }
+                        for (FetchSubPhaseProcessor processor : processors) {
+                            processor.setNextReader(currentReaderContext);
+                        }
+                        leafNestedDocuments = nestedDocuments.getLeafNestedDocuments(currentReaderContext);
+                    } finally {
+                        profiler.stopNextReader();
+                    }
                 }
                 assert currentReaderContext != null;
                 HitContext hit = prepareHitContext(
                     context,
+                    profiler,
                     leafNestedDocuments,
                     fieldsVisitor,
                     docId,

@@ -153,17 +176,16 @@ public class FetchPhase {
         }

         TotalHits totalHits = context.queryResult().getTotalHits();
-        context.fetchResult().hits(new SearchHits(hits, totalHits, context.queryResult().getMaxScore()));
-
+        return new SearchHits(hits, totalHits, context.queryResult().getMaxScore());
     }

-    List<FetchSubPhaseProcessor> getProcessors(SearchShardTarget target, FetchContext context) {
+    List<FetchSubPhaseProcessor> getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) {
         try {
             List<FetchSubPhaseProcessor> processors = new ArrayList<>();
             for (FetchSubPhase fsp : fetchSubPhases) {
                 FetchSubPhaseProcessor processor = fsp.getProcessor(context);
                 if (processor != null) {
-                    processors.add(processor);
+                    processors.add(profiler.profile(fsp.getClass().getSimpleName(), "", processor));
                 }
             }
             return processors;

@@ -233,6 +255,7 @@ public class FetchPhase {
     }

     private HitContext prepareHitContext(SearchContext context,
+                                         Profiler profiler,
                                          LeafNestedDocuments nestedDocuments,
                                          FieldsVisitor fieldsVisitor,
                                          int docId,

@@ -241,9 +264,9 @@ public class FetchPhase {
                                          CheckedBiConsumer<Integer, FieldsVisitor, IOException> storedFieldReader) throws IOException {
         if (nestedDocuments.advance(docId - subReaderContext.docBase) == null) {
             return prepareNonNestedHitContext(
-                context, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader);
+                context, profiler, fieldsVisitor, docId, storedToRequestedFields, subReaderContext, storedFieldReader);
         } else {
-            return prepareNestedHitContext(context, docId, nestedDocuments, storedToRequestedFields,
+            return prepareNestedHitContext(context, profiler, docId, nestedDocuments, storedToRequestedFields,
                 subReaderContext, storedFieldReader);
         }
     }

@@ -256,6 +279,7 @@ public class FetchPhase {
      * allows fetch subphases that use the hit context to access the preloaded source.
      */
     private HitContext prepareNonNestedHitContext(SearchContext context,
+                                                  Profiler profiler,
                                                   FieldsVisitor fieldsVisitor,
                                                   int docId,
                                                   Map<String, Set<String>> storedToRequestedFields,

@@ -267,7 +291,7 @@ public class FetchPhase {
             return new HitContext(hit, subReaderContext, subDocId);
         } else {
             SearchHit hit;
-            loadStoredFields(context.getSearchExecutionContext()::getFieldType, fieldReader, fieldsVisitor, subDocId);
+            loadStoredFields(context.getSearchExecutionContext()::getFieldType, profiler, fieldReader, fieldsVisitor, subDocId);
             if (fieldsVisitor.fields().isEmpty() == false) {
                 Map<String, DocumentField> docFields = new HashMap<>();
                 Map<String, DocumentField> metaFields = new HashMap<>();

@@ -301,6 +325,7 @@ public class FetchPhase {
      */
     @SuppressWarnings("unchecked")
     private HitContext prepareNestedHitContext(SearchContext context,
+                                               Profiler profiler,
                                                int topDocId,
                                                LeafNestedDocuments nestedInfo,
                                                Map<String, Set<String>> storedToRequestedFields,

@@ -328,7 +353,13 @@ public class FetchPhase {
             }
         } else {
             FieldsVisitor rootFieldsVisitor = new FieldsVisitor(needSource);
-            loadStoredFields(searchExecutionContext::getFieldType, storedFieldReader, rootFieldsVisitor, nestedInfo.rootDoc());
+            loadStoredFields(
+                searchExecutionContext::getFieldType,
+                profiler,
+                storedFieldReader,
+                rootFieldsVisitor,
+                nestedInfo.rootDoc()
+            );
             rootId = rootFieldsVisitor.id();

             if (needSource) {

@@ -346,7 +377,13 @@ public class FetchPhase {
         Map<String, DocumentField> metaFields = emptyMap();
         if (context.hasStoredFields() && context.storedFieldsContext().fieldNames().isEmpty() == false) {
             FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(storedToRequestedFields.keySet(), false);
-            loadStoredFields(searchExecutionContext::getFieldType, storedFieldReader, nestedFieldsVisitor, nestedInfo.doc());
+            loadStoredFields(
+                searchExecutionContext::getFieldType,
+                profiler,
+                storedFieldReader,
+                nestedFieldsVisitor,
+                nestedInfo.doc()
+            );
             if (nestedFieldsVisitor.fields().isEmpty() == false) {
                 docFields = new HashMap<>();
                 metaFields = new HashMap<>();

@@ -390,11 +427,17 @@ public class FetchPhase {
     }

     private void loadStoredFields(Function<String, MappedFieldType> fieldTypeLookup,
+                                  Profiler profileListener,
                                   CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader,
                                   FieldsVisitor fieldVisitor, int docId) throws IOException {
-        fieldVisitor.reset();
-        fieldReader.accept(docId, fieldVisitor);
-        fieldVisitor.postProcess(fieldTypeLookup);
+        try {
+            profileListener.startLoadingStoredFields();
+            fieldVisitor.reset();
+            fieldReader.accept(docId, fieldVisitor);
+            fieldVisitor.postProcess(fieldTypeLookup);
+        } finally {
+            profileListener.stopLoadingStoredFields();
+        }
     }

     private static void fillDocAndMetaFields(SearchContext context, FieldsVisitor fieldsVisitor,

@@ -428,4 +471,52 @@ public class FetchPhase {
     static boolean hasSequentialDocs(DocIdToIndex[] docs) {
         return docs.length > 0 && docs[docs.length-1].docId - docs[0].docId == docs.length - 1;
     }
+
+    interface Profiler {
+        ProfileResult finish();
+
+        FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor processor);
+
+        void visitor(FieldsVisitor fieldsVisitor);
+
+        void startLoadingStoredFields();
+
+        void stopLoadingStoredFields();
+
+        void startNextReader();
+
+        void stopNextReader();
+
+        Profiler NOOP = new Profiler() {
+            @Override
+            public ProfileResult finish() {
+                return null;
+            }
+
+            @Override
+            public void visitor(FieldsVisitor fieldsVisitor) {}
+
+            @Override
+            public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor processor) {
+                return processor;
+            }
+
+            @Override
+            public void startLoadingStoredFields() {}
+
+            @Override
+            public void stopLoadingStoredFields() {}
+
+            @Override
+            public void startNextReader() {}
+
+            @Override
+            public void stopNextReader() {}
+
+            @Override
+            public String toString() {
+                return "noop";
+            }
+        };
+    }
 }
@@ -0,0 +1,183 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.fetch;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
+import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
+import org.elasticsearch.search.profile.AbstractProfileBreakdown;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.Timer;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static java.util.stream.Collectors.toList;
+
+public class FetchProfiler implements FetchPhase.Profiler {
+    private final FetchProfileBreakdown current;
+
+    /**
+     * Start profiling at the current time.
+     */
+    public FetchProfiler() {
+        this(System.nanoTime());
+    }
+
+    /**
+     * Build the profiler starting at a fixed time.
+     */
+    public FetchProfiler(long nanoTime) {
+        current = new FetchProfileBreakdown(nanoTime);
+    }
+
+    /**
+     * Finish profiling at the current time.
+     */
+    @Override
+    public ProfileResult finish() {
+        return finish(System.nanoTime());
+    }
+
+    /**
+     * Finish profiling at a fixed time.
+     */
+    public ProfileResult finish(long nanoTime) {
+        return current.result(nanoTime);
+    }
+
+    @Override
+    public void visitor(FieldsVisitor fieldsVisitor) {
+        current.debug.put(
+            "stored_fields",
+            fieldsVisitor == null ? List.of() : fieldsVisitor.getFieldNames().stream().sorted().collect(toList())
+        );
+    }
+
+    @Override
+    public FetchSubPhaseProcessor profile(String type, String description, FetchSubPhaseProcessor delegate) {
+        FetchSubPhaseProfileBreakdown breakdown = new FetchSubPhaseProfileBreakdown(type, description, delegate);
+        current.subPhases.add(breakdown);
+        return new FetchSubPhaseProcessor() {
+            @Override
+            public void setNextReader(LeafReaderContext readerContext) throws IOException {
+                Timer timer = breakdown.getTimer(FetchSubPhaseTiming.NEXT_READER);
+                timer.start();
+                try {
+                    delegate.setNextReader(readerContext);
+                } finally {
+                    timer.stop();
+                }
+            }
+
+            @Override
+            public void process(HitContext hitContext) throws IOException {
+                Timer timer = breakdown.getTimer(FetchSubPhaseTiming.PROCESS);
+                timer.start();
+                try {
+                    delegate.process(hitContext);
+                } finally {
+                    timer.stop();
+                }
+            }
+        };
+    }
+
+    @Override
+    public void startLoadingStoredFields() {
+        current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).start();
+    }
+
+    @Override
+    public void stopLoadingStoredFields() {
+        current.getTimer(FetchPhaseTiming.LOAD_STORED_FIELDS).stop();
+    }
+
+    @Override
+    public void startNextReader() {
+        current.getTimer(FetchPhaseTiming.NEXT_READER).start();
+    }
+
+    @Override
+    public void stopNextReader() {
+        current.getTimer(FetchPhaseTiming.NEXT_READER).stop();
+    }
+
+    static class FetchProfileBreakdown extends AbstractProfileBreakdown<FetchPhaseTiming> {
+        private final long start;
+        private final Map<String, Object> debug = new HashMap<>();
+        private final List<FetchSubPhaseProfileBreakdown> subPhases = new ArrayList<>();
+
+        FetchProfileBreakdown(long start) {
+            super(FetchPhaseTiming.class);
+            this.start = start;
+        }
+
+        @Override
+        protected Map<String, Object> toDebugMap() {
+            return Map.copyOf(debug);
+        }
+
+        ProfileResult result(long stop) {
+            List<ProfileResult> children = subPhases.stream()
+                .sorted(Comparator.comparing(b -> b.type))
+                .map(FetchSubPhaseProfileBreakdown::result)
+                .collect(toList());
+            return new ProfileResult("fetch", "", toBreakdownMap(), toDebugMap(), stop - start, children);
+        }
+    }
+
+    enum FetchPhaseTiming {
+        NEXT_READER,
+        LOAD_STORED_FIELDS;
+
+        @Override
+        public String toString() {
+            return name().toLowerCase(Locale.ROOT);
+        }
+    }
+
+    static class FetchSubPhaseProfileBreakdown extends AbstractProfileBreakdown<FetchSubPhaseTiming> {
+        private final String type;
+        private final String description;
+        private final FetchSubPhaseProcessor processor;
+
+        FetchSubPhaseProfileBreakdown(String type, String description, FetchSubPhaseProcessor processor) {
+            super(FetchSubPhaseTiming.class);
+            this.type = type;
+            this.description = description;
+            this.processor = processor;
+        }
+
+        @Override
+        protected Map<String, Object> toDebugMap() {
+            return processor.getDebugInfo();
+        }
+
+        ProfileResult result() {
+            return new ProfileResult(type, description, toBreakdownMap(), toDebugMap(), toNodeTime(), List.of());
+        }
+    }
+
+    enum FetchSubPhaseTiming {
+        NEXT_READER,
+        PROCESS;
+
+        @Override
+        public String toString() {
+            return name().toLowerCase(Locale.ROOT);
+        }
+    }
+}
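The whole `FetchProfiler` lifecycle is driven through the `FetchPhase.Profiler` callbacks above. A minimal sketch with fixed timestamps (no real fetch runs here, so the stored-fields timer is started and stopped by hand; assumes the sketch lives in `org.elasticsearch.search.fetch` so it can see `FetchProfiler`):

```java
import org.elasticsearch.search.profile.ProfileResult;

final class FetchProfilerLifecycleSketch {
    public static void main(String[] args) {
        FetchProfiler profiler = new FetchProfiler(0);      // start the clock at nanoTime 0
        profiler.startLoadingStoredFields();
        // ... stored fields for one hit would be loaded here ...
        profiler.stopLoadingStoredFields();
        ProfileResult result = profiler.finish(1_000_000);  // finish at nanoTime 1ms
        // The result is typed "fetch", spans the full millisecond, and carries the
        // load_stored_fields timer plus the stored_fields debug list.
        System.out.println(result.getTime());
    }
}
```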
@@ -8,6 +8,7 @@

 package org.elasticsearch.search.fetch;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.SearchHit;

@@ -15,6 +16,7 @@ import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.query.QuerySearchResult;

 import java.io.IOException;

@@ -25,18 +27,34 @@ public final class FetchSearchResult extends SearchPhaseResult {
     // client side counter
     private transient int counter;

+    private ProfileResult profileResult;
+
     public FetchSearchResult() {
     }

+    public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) {
+        this.contextId = id;
+        setSearchShardTarget(shardTarget);
+    }
+
     public FetchSearchResult(StreamInput in) throws IOException {
         super(in);
         contextId = new ShardSearchContextId(in);
         hits = new SearchHits(in);
+        if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+            profileResult = in.readOptionalWriteable(ProfileResult::new);
+        } else {
+            profileResult = null;
+        }
     }

-    public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) {
-        this.contextId = id;
-        setSearchShardTarget(shardTarget);
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        contextId.writeTo(out);
+        hits.writeTo(out);
+        if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+            out.writeOptionalWriteable(profileResult);
+        }
     }

     @Override

@@ -49,9 +67,11 @@ public final class FetchSearchResult extends SearchPhaseResult {
         return this;
     }

-    public void hits(SearchHits hits) {
+    public void shardResult(SearchHits hits, ProfileResult profileResult) {
         assert assertNoSearchTarget(hits);
         this.hits = hits;
+        assert this.profileResult == null;
+        this.profileResult = profileResult;
     }

     private boolean assertNoSearchTarget(SearchHits hits) {

@@ -74,9 +94,7 @@ public final class FetchSearchResult extends SearchPhaseResult {
         return counter++;
     }

-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        contextId.writeTo(out);
-        hits.writeTo(out);
+    public ProfileResult profileResult() {
+        return profileResult;
     }
 }
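The `Version.V_8_0_0` checks above are the wire-compatibility gate: the optional `ProfileResult` is only serialized when both ends speak the 8.0.0 format, so mixed-version clusters keep working. A plausible round-trip sketch (not one of the PR's own tests; assumes a `FetchSearchResult` whose hits are already populated):

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

final class FetchProfileWireSketch {
    static FetchSearchResult roundTrip(FetchSearchResult original) throws IOException {
        BytesStreamOutput out = new BytesStreamOutput();
        out.setVersion(Version.V_8_0_0);              // both sides on 8.0.0 or later
        original.writeTo(out);                        // writes hits, then the optional profile
        StreamInput in = out.bytes().streamInput();
        in.setVersion(Version.V_8_0_0);
        return new FetchSearchResult(in);             // profileResult() survives the trip
    }
}
```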
@@ -12,6 +12,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;

 import java.io.IOException;
+import java.util.Map;

 /**
  * Executes the logic for a {@link FetchSubPhase} against a particular leaf reader and hit

@@ -28,4 +29,11 @@ public interface FetchSubPhaseProcessor {
      */
     void process(HitContext hitContext) throws IOException;

+    /**
+     * Called when profiling after processing all documents to get any extra
+     * debug information the phase collected.
+     */
+    default Map<String, Object> getDebugInfo() {
+        return null;
+    }
 }
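Because `getDebugInfo` defaults to `null`, existing processors are untouched; a processor opts in by returning whatever counters it kept while processing hits. The `FetchSourcePhase` hunk below is the in-tree example; here is the same idea reduced to a toy (hypothetical) processor:

```java
import java.util.Map;

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;

final class CountingProcessorSketch implements FetchSubPhaseProcessor {
    private int processed;

    @Override
    public void setNextReader(LeafReaderContext readerContext) {
        // nothing per-segment to prepare in this toy example
    }

    @Override
    public void process(HitContext hitContext) {
        processed++; // count every hit this sub-phase touched
    }

    @Override
    public Map<String, Object> getDebugInfo() {
        return Map.of("hits_processed", processed);
    }
}
```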
@@ -20,7 +20,6 @@ import java.io.IOException;
  * Explains the scoring calculations for the top hits.
  */
 public final class ExplainPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) {
         if (context.explain() == false) {
@@ -26,7 +26,6 @@ import java.util.List;
  * Specifying {@code "docvalue_fields": ["field1", "field2"]}
  */
 public final class FetchDocValuesPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) {
         FetchDocValuesContext dvContext = context.docValuesContext();
@@ -24,7 +24,6 @@ import java.util.Map;
  * retrieves the field values from _source and returns them as document fields.
  */
 public final class FetchFieldsPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) {
         FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext();
@@ -21,7 +21,6 @@ import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import java.io.IOException;

 public class FetchScorePhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException {
         if (context.fetchScores() == false) {
@ -23,7 +23,6 @@ import java.io.IOException;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
public final class FetchSourcePhase implements FetchSubPhase {
|
public final class FetchSourcePhase implements FetchSubPhase {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) {
|
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) {
|
||||||
FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext();
|
FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext();
|
||||||
|
@ -34,6 +33,8 @@ public final class FetchSourcePhase implements FetchSubPhase {
|
||||||
assert fetchSourceContext.fetchSource();
|
assert fetchSourceContext.fetchSource();
|
||||||
|
|
||||||
return new FetchSubPhaseProcessor() {
|
return new FetchSubPhaseProcessor() {
|
||||||
|
private int fastPath;
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void setNextReader(LeafReaderContext readerContext) {
|
public void setNextReader(LeafReaderContext readerContext) {
|
||||||
|
|
||||||
|
@@ -50,46 +51,51 @@ public final class FetchSourcePhase implements FetchSubPhase {
                 }
                 hitExecute(fetchSourceContext, hitContext);
             }
-        };
-    }

-    @SuppressWarnings("unchecked")
-    private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitContext) {
-        final boolean nestedHit = hitContext.hit().getNestedIdentity() != null;
-        SourceLookup source = hitContext.sourceLookup();
+            @SuppressWarnings("unchecked")
+            private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitContext) {
+                final boolean nestedHit = hitContext.hit().getNestedIdentity() != null;
+                SourceLookup source = hitContext.sourceLookup();

-        // If this is a parent document and there are no source filters, then add the source as-is.
-        if (nestedHit == false && containsFilters(fetchSourceContext) == false) {
-            hitContext.hit().sourceRef(source.internalSourceRef());
-            return;
-        }
+                // If this is a parent document and there are no source filters, then add the source as-is.
+                if (nestedHit == false && containsFilters(fetchSourceContext) == false) {
+                    hitContext.hit().sourceRef(source.internalSourceRef());
+                    fastPath++;
+                    return;
+                }

-        // Otherwise, filter the source and add it to the hit.
-        Object value = source.filter(fetchSourceContext);
-        if (nestedHit) {
-            value = getNestedSource((Map<String, Object>) value, hitContext);
-        }
+                // Otherwise, filter the source and add it to the hit.
+                Object value = source.filter(fetchSourceContext);
+                if (nestedHit) {
+                    value = getNestedSource((Map<String, Object>) value, hitContext);
+                }

-        try {
-            final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length());
-            BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
-            XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput);
-            if (value != null) {
-                builder.value(value);
-            } else {
-                // This happens if the source filtering could not find the specified in the _source.
-                // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format
-                // it is. In certain cases, for example response serialization we fail if no xcontent type can't be
-                // detected. So instead we just return an empty top level object. Also this is in inline with what was
-                // being return in this situation in 5.x and earlier.
-                builder.startObject();
-                builder.endObject();
-            }
-            hitContext.hit().sourceRef(BytesReference.bytes(builder));
-        } catch (IOException e) {
-            throw new ElasticsearchException("Error filtering source", e);
-        }
-    }
+                try {
+                    final int initialCapacity = nestedHit ? 1024 : Math.min(1024, source.internalSourceRef().length());
+                    BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
+                    XContentBuilder builder = new XContentBuilder(source.sourceContentType().xContent(), streamOutput);
+                    if (value != null) {
+                        builder.value(value);
+                    } else {
+                        // This happens if the source filtering could not find the specified in the _source.
+                        // Just doing `builder.value(null)` is valid, but the xcontent validation can't detect what format
+                        // it is. In certain cases, for example response serialization we fail if no xcontent type can't be
+                        // detected. So instead we just return an empty top level object. Also this is in inline with what was
+                        // being return in this situation in 5.x and earlier.
+                        builder.startObject();
+                        builder.endObject();
+                    }
+                    hitContext.hit().sourceRef(BytesReference.bytes(builder));
+                } catch (IOException e) {
+                    throw new ElasticsearchException("Error filtering source", e);
+                }
+            }
+
+            @Override
+            public Map<String, Object> getDebugInfo() {
+                return Map.of("fast_path", fastPath);
+            }
+        };
+    }

     private static boolean containsFilters(FetchSourceContext context) {
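
Note: the fastPath counter threaded through the hunk above is the debug-stats pattern this change uses for fetch profiling: a plain int field on the anonymous processor is bumped on the cheap branch and surfaced through getDebugInfo(), which the profiler copies into the "debug" section of the output. A minimal self-contained sketch of the pattern (the Processor interface here is a hypothetical stand-in for FetchSubPhaseProcessor, not the real API):

    import java.util.Map;

    interface Processor {
        void process(String hit);

        default Map<String, Object> getDebugInfo() {
            return Map.of();
        }
    }

    class DebugCounterSketch {
        static Processor processor() {
            return new Processor() {
                private int fastPath; // counts hits that skipped the expensive branch

                @Override
                public void process(String hit) {
                    if (hit.isEmpty()) { // stand-in for the "no filters, not nested" shortcut
                        fastPath++;
                        return;
                    }
                    // slow path: filter and re-serialize the source
                }

                @Override
                public Map<String, Object> getDebugInfo() {
                    return Map.of("fast_path", fastPath);
                }
            };
        }
    }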
@@ -18,7 +18,6 @@ import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import java.io.IOException;

 public final class FetchVersionPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) {
         if (context.version() == false) {
@@ -35,7 +35,7 @@ public final class InnerHitsPhase implements FetchSubPhase {

     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) {
-        if (searchContext.innerHits() == null) {
+        if (searchContext.innerHits() == null || searchContext.innerHits().getInnerHits().isEmpty()) {
             return null;
         }
         Map<String, InnerHitsContext.InnerHitSubContext> innerHits = searchContext.innerHits().getInnerHits();
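
Note: the added isEmpty() guard piggybacks on the existing convention that a null processor means "this sub-phase does not apply to the request", so a request with an empty inner_hits section is now skipped outright; with profiling in the picture that also keeps no-op sub-phases out of the profile tree. A hedged sketch of that driver-side contract (hypothetical loop, not the real FetchPhase code):

    for (FetchSubPhase subPhase : subPhases) {
        FetchSubPhaseProcessor processor = subPhase.getProcessor(fetchContext);
        if (processor == null) {
            continue; // sub-phase skipped: it is neither run nor profiled
        }
        // otherwise (optionally wrapped in a profiling decorator) run it per hit
    }

The same guard is applied to ScriptFieldsPhase further below.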
@@ -25,7 +25,6 @@ import java.util.List;
 import java.util.Map;

 public final class MatchedQueriesPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOException {
         Map<String, Query> namedQueries = new HashMap<>();

@@ -72,5 +71,4 @@ public final class MatchedQueriesPhase implements FetchSubPhase {
             }
         };
     }
-
 }
@@ -22,10 +22,9 @@ import java.util.Collections;
 import java.util.List;

 public final class ScriptFieldsPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) {
-        if (context.scriptFields() == null) {
+        if (context.scriptFields() == null || context.scriptFields().fields().isEmpty()) {
             return null;
         }
         List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();
@@ -18,7 +18,6 @@ import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
 import java.io.IOException;

 public final class SeqNoPrimaryTermPhase implements FetchSubPhase {
-
     @Override
     public FetchSubPhaseProcessor getProcessor(FetchContext context) {
         if (context.seqNoAndPrimaryTerm() == false) {
@@ -40,13 +40,13 @@ public class InternalSearchResponse extends SearchResponseSections implements Wr

     public InternalSearchResponse(StreamInput in) throws IOException {
         super(
             new SearchHits(in),
             in.readBoolean() ? InternalAggregations.readFrom(in) : null,
             in.readBoolean() ? new Suggest(in) : null,
             in.readBoolean(),
             in.readOptionalBoolean(),
             in.readOptionalWriteable(SearchProfileResults::new),
             in.readVInt()
         );
     }

@@ -9,15 +9,16 @@
 package org.elasticsearch.search.profile;

 import org.elasticsearch.Version;
-import org.elasticsearch.common.xcontent.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.xcontent.InstantiatingObjectParser;
+import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.core.TimeValue;

 import java.io.IOException;
 import java.util.Collections;
@@ -26,17 +27,13 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.TimeUnit;

+import static java.util.stream.Collectors.toMap;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

 /**
- * This class is the internal representation of a profiled Query, corresponding
- * to a single node in the query tree. It is built after the query has finished executing
- * and is merely a structured representation, rather than the entity that collects the timing
- * profile (see InternalProfiler for that)
- * <p>
- * Each InternalProfileResult has a List of InternalProfileResults, which will contain
- * "children" queries if applicable
+ * The result of a profiled *thing*, like a query or an aggregation. See
+ * {@link AbstractProfiler} for the statistic collection framework.
  */
 public final class ProfileResult implements Writeable, ToXContentObject {
     static final ParseField TYPE = new ParseField("type");
@@ -161,13 +158,41 @@ public final class ProfileResult implements Writeable, ToXContentObject {
         return builder.endObject();
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        ProfileResult other = (ProfileResult) obj;
+        return type.equals(other.type)
+            && description.equals(other.description)
+            && breakdown.equals(other.breakdown)
+            && debug.equals(other.debug)
+            && nodeTime == other.nodeTime
+            && children.equals(other.children);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(type, description, breakdown, debug, nodeTime, children);
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
     private static final InstantiatingObjectParser<ProfileResult, Void> PARSER;
     static {
         InstantiatingObjectParser.Builder<ProfileResult, Void> parser =
             InstantiatingObjectParser.builder("profile_result", true, ProfileResult.class);
         parser.declareString(constructorArg(), TYPE);
         parser.declareString(constructorArg(), DESCRIPTION);
-        parser.declareObject(constructorArg(), (p, c) -> p.map(), BREAKDOWN);
+        parser.declareObject(
+            constructorArg(),
+            (p, c) -> p.map().entrySet().stream().collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue())),
+            BREAKDOWN
+        );
         parser.declareObject(optionalConstructorArg(), (p, c) -> p.map(), DEBUG);
         parser.declareLong(constructorArg(), NODE_TIME_RAW);
         parser.declareObjectArray(optionalConstructorArg(), (p, c) -> fromXContent(p), CHILDREN);
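
Note: the reworked BREAKDOWN declaration above exists because p.map() hands back whatever number type the JSON parser chose (Integer for small counters, Long for large ones), while the breakdown must be a uniform Map<String, Long>. A self-contained sketch of the same normalization (hypothetical class, plain JDK only):

    import java.util.Map;
    import static java.util.stream.Collectors.toMap;

    class BreakdownExample {
        // Funnels every value through Number.longValue(), whatever its parsed type.
        static Map<String, Long> normalize(Map<String, Object> raw) {
            return raw.entrySet().stream()
                .collect(toMap(Map.Entry::getKey, e -> ((Number) e.getValue()).longValue()));
        }

        public static void main(String[] args) {
            // 7 parses as an Integer; the second value only fits in a Long.
            Map<String, Object> raw = Map.of("next_doc_count", 7, "next_doc", 3_000_000_000L);
            System.out.println(normalize(raw));
        }
    }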
@@ -8,8 +8,11 @@

 package org.elasticsearch.search.profile;

+import org.elasticsearch.search.fetch.FetchProfiler;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
 import org.elasticsearch.search.profile.aggregation.AggregationProfiler;
+import org.elasticsearch.search.profile.query.QueryProfileShardResult;
 import org.elasticsearch.search.profile.query.QueryProfiler;

 import java.util.ArrayList;

@@ -20,18 +23,17 @@ import java.util.List;
 public final class Profilers {

     private final ContextIndexSearcher searcher;
-    private final List<QueryProfiler> queryProfilers;
-    private final AggregationProfiler aggProfiler;
+    private final List<QueryProfiler> queryProfilers = new ArrayList<>();
+    private final AggregationProfiler aggProfiler = new AggregationProfiler();

-    /** Sole constructor. This {@link Profilers} instance will initially wrap one {@link QueryProfiler}. */
     public Profilers(ContextIndexSearcher searcher) {
         this.searcher = searcher;
-        this.queryProfilers = new ArrayList<>();
-        this.aggProfiler = new AggregationProfiler();
         addQueryProfiler();
     }

-    /** Switch to a new profile. */
+    /**
+     * Begin profiling a new query.
+     */
     public QueryProfiler addQueryProfiler() {
         QueryProfiler profiler = new QueryProfiler();
         searcher.setProfiler(profiler);

@@ -39,19 +41,45 @@ public final class Profilers {
         return profiler;
     }

-    /** Get the current profiler. */
+    /**
+     * Get the profiler for the query we are currently processing.
+     */
     public QueryProfiler getCurrentQueryProfiler() {
         return queryProfilers.get(queryProfilers.size() - 1);
     }

-    /** Return the list of all created {@link QueryProfiler}s so far. */
+    /**
+     * The list of all {@link QueryProfiler}s created so far.
+     */
     public List<QueryProfiler> getQueryProfilers() {
         return Collections.unmodifiableList(queryProfilers);
     }

-    /** Return the {@link AggregationProfiler}. */
     public AggregationProfiler getAggregationProfiler() {
         return aggProfiler;
     }
+
+    /**
+     * Build a profiler for the fetch phase.
+     */
+    public FetchProfiler startProfilingFetchPhase() {
+        return new FetchProfiler();
+    }
+
+    /**
+     * Build the results for the query phase.
+     */
+    public SearchProfileQueryPhaseResult buildQueryPhaseResults() {
+        List<QueryProfileShardResult> queryResults = new ArrayList<>(queryProfilers.size());
+        for (QueryProfiler queryProfiler : queryProfilers) {
+            QueryProfileShardResult result = new QueryProfileShardResult(
+                queryProfiler.getTree(),
+                queryProfiler.getRewriteTime(),
+                queryProfiler.getCollector()
+            );
+            queryResults.add(result);
+        }
+        AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree());
+        return new SearchProfileQueryPhaseResult(queryResults, aggResults);
+    }
 }
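
Note: with the two methods added above, Profilers becomes the single owner of per-shard profile construction: buildQueryPhaseResults() snapshots the query and aggregation profilers, while startProfilingFetchPhase() hands out a fresh FetchProfiler because the fetch phase may run later, on a separate request. A hedged sketch of the intended call order (variable names hypothetical; the real call sites are the query and fetch phases):

    Profilers profilers = new Profilers(searcher);
    // ... query phase executes, possibly calling addQueryProfiler() again ...
    SearchProfileQueryPhaseResult queryProfile = profilers.buildQueryPhaseResults();
    // ... later, when hits are fetched ...
    FetchProfiler fetchProfiler = profilers.startProfilingFetchPhase();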
@@ -18,6 +18,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;

 /**
  * Profile results from a shard for the search phase.

@@ -63,4 +64,18 @@ public class SearchProfileQueryPhaseResult implements Writeable
     public AggregationProfileShardResult getAggregationProfileResults() {
         return aggProfileShardResult;
     }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        SearchProfileQueryPhaseResult other = (SearchProfileQueryPhaseResult) obj;
+        return queryProfileResults.equals(other.queryProfileResults) && aggProfileShardResult.equals(other.aggProfileShardResult);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(queryProfileResults, aggProfileShardResult);
+    }
 }
@@ -1,5 +1,3 @@
-package org.elasticsearch.search.profile;
-
 /*
  * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
  * or more contributor license agreements. Licensed under the Elastic License

@@ -8,6 +6,11 @@ package org.elasticsearch.search.profile;
  * Side Public License, v 1.
  */

+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.Strings;
+
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -15,9 +18,7 @@ import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
-import org.elasticsearch.search.profile.aggregation.AggregationProfiler;
 import org.elasticsearch.search.profile.query.QueryProfileShardResult;
-import org.elasticsearch.search.profile.query.QueryProfiler;

 import java.io.IOException;
 import java.util.ArrayList;

@@ -34,42 +35,42 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
  */
 public final class SearchProfileResults implements Writeable, ToXContentFragment {

-    private static final String SEARCHES_FIELD = "searches";
     private static final String ID_FIELD = "id";
     private static final String SHARDS_FIELD = "shards";
     public static final String PROFILE_FIELD = "profile";

-    private Map<String, SearchProfileQueryPhaseResult> shardResults;
+    private Map<String, SearchProfileShardResult> shardResults;

-    public SearchProfileResults(Map<String, SearchProfileQueryPhaseResult> shardResults) {
+    public SearchProfileResults(Map<String, SearchProfileShardResult> shardResults) {
         this.shardResults = Collections.unmodifiableMap(shardResults);
     }

     public SearchProfileResults(StreamInput in) throws IOException {
-        int size = in.readInt();
-        shardResults = new HashMap<>(size);
-        for (int i = 0; i < size; i++) {
-            String key = in.readString();
-            SearchProfileQueryPhaseResult shardResult = new SearchProfileQueryPhaseResult(in);
-            shardResults.put(key, shardResult);
+        if (in.getVersion().onOrAfter(Version.V_8_0_0)) {
+            shardResults = in.readMap(StreamInput::readString, SearchProfileShardResult::new);
+        } else {
+            // Before 8.0.0 we only send the query phase result
+            shardResults = in.readMap(
+                StreamInput::readString,
+                i -> new SearchProfileShardResult(new SearchProfileQueryPhaseResult(i), null)
+            );
         }
-        shardResults = Collections.unmodifiableMap(shardResults);
-    }
-
-    public Map<String, SearchProfileQueryPhaseResult> getShardResults() {
-        return this.shardResults;
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
-        out.writeInt(shardResults.size());
-        for (Map.Entry<String, SearchProfileQueryPhaseResult> entry : shardResults.entrySet()) {
-            out.writeString(entry.getKey());
-            entry.getValue().writeTo(out);
+        if (out.getVersion().onOrAfter(Version.V_8_0_0)) {
+            out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.writeTo(o));
+        } else {
+            // Before 8.0.0 we only send the query phase
+            out.writeMap(shardResults, StreamOutput::writeString, (o, r) -> r.getQueryPhase().writeTo(o));
         }
     }

+    public Map<String, SearchProfileShardResult> getShardResults() {
+        return shardResults;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(PROFILE_FIELD).startArray(SHARDS_FIELD);
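
Note: the version gate above is what keeps mixed-version clusters working during an upgrade: an 8.0.0+ stream carries the full SearchProfileShardResult per shard, while a pre-8.0.0 stream carries only the query-phase portion, which the reader wraps back up with a null fetch profile. A hedged round-trip sketch in the style of the wire serialization tests (the scaffolding is hypothetical and the pre-8.0 version constant is illustrative):

    BytesStreamOutput out = new BytesStreamOutput();
    out.setVersion(Version.V_7_15_0);            // pretend the peer is pre-8.0.0
    profileResults.writeTo(out);                 // fetch profiles are dropped on the wire
    StreamInput in = out.bytes().streamInput();
    in.setVersion(Version.V_7_15_0);
    SearchProfileResults roundTripped = new SearchProfileResults(in);
    for (SearchProfileShardResult shard : roundTripped.getShardResults().values()) {
        assert shard.getFetchPhase() == null;    // reader substitutes null for the missing phase
    }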
@@ -79,28 +80,41 @@ public final class SearchProfileResults implements Writeable, ToXContentFragment
         for (String key : sortedKeys) {
             builder.startObject();
             builder.field(ID_FIELD, key);
-            builder.startArray(SEARCHES_FIELD);
-            SearchProfileQueryPhaseResult profileShardResult = shardResults.get(key);
-            for (QueryProfileShardResult result : profileShardResult.getQueryProfileResults()) {
-                result.toXContent(builder, params);
-            }
-            builder.endArray();
-            profileShardResult.getAggregationProfileResults().toXContent(builder, params);
+            shardResults.get(key).toXContent(builder, params);
             builder.endObject();
         }
         builder.endArray().endObject();
         return builder;
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        SearchProfileResults other = (SearchProfileResults) obj;
+        return shardResults.equals(other.shardResults);
+    }
+
+    @Override
+    public int hashCode() {
+        return shardResults.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
     public static SearchProfileResults fromXContent(XContentParser parser) throws IOException {
         XContentParser.Token token = parser.currentToken();
         ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
-        Map<String, SearchProfileQueryPhaseResult> searchProfileResults = new HashMap<>();
+        Map<String, SearchProfileShardResult> profileResults = new HashMap<>();
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.START_ARRAY) {
                 if (SHARDS_FIELD.equals(parser.currentName())) {
                     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                        parseSearchProfileResultsEntry(parser, searchProfileResults);
+                        parseProfileResultsEntry(parser, profileResults);
                     }
                 } else {
                     parser.skipChildren();

@@ -109,15 +123,16 @@ public final class SearchProfileResults implements Writeable, ToXContentFragment
                 parser.skipChildren();
             }
         }
-        return new SearchProfileResults(searchProfileResults);
+        return new SearchProfileResults(profileResults);
     }

-    private static void parseSearchProfileResultsEntry(XContentParser parser,
-            Map<String, SearchProfileQueryPhaseResult> searchProfileResults) throws IOException {
+    private static void parseProfileResultsEntry(XContentParser parser,
+            Map<String, SearchProfileShardResult> searchProfileResults) throws IOException {
         XContentParser.Token token = parser.currentToken();
         ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
         List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
         AggregationProfileShardResult aggProfileShardResult = null;
+        ProfileResult fetchResult = null;
         String id = null;
         String currentFieldName = null;
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

@@ -130,7 +145,7 @@ public final class SearchProfileResults implements Writeable, ToXContentFragment
                     parser.skipChildren();
                 }
             } else if (token == XContentParser.Token.START_ARRAY) {
-                if (SEARCHES_FIELD.equals(currentFieldName)) {
+                if ("searches".equals(currentFieldName)) {
                     while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                         queryProfileResults.add(QueryProfileShardResult.fromXContent(parser));
                     }

@@ -139,32 +154,16 @@ public final class SearchProfileResults implements Writeable, ToXContentFragment
                 } else {
                     parser.skipChildren();
                 }
+            } else if (token == XContentParser.Token.START_OBJECT) {
+                fetchResult = ProfileResult.fromXContent(parser);
             } else {
                 parser.skipChildren();
             }
         }
-        searchProfileResults.put(id, new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult));
-    }
-
-    /**
-     * Helper method to convert Profiler into InternalProfileShardResults, which
-     * can be serialized to other nodes, emitted as JSON, etc.
-     *
-     * @param profilers
-     *            The {@link Profilers} to convert into results
-     * @return A {@link SearchProfileQueryPhaseResult} representing the results for this
-     *         shard
-     */
-    public static SearchProfileQueryPhaseResult buildShardResults(Profilers profilers) {
-        List<QueryProfiler> queryProfilers = profilers.getQueryProfilers();
-        AggregationProfiler aggProfiler = profilers.getAggregationProfiler();
-        List<QueryProfileShardResult> queryResults = new ArrayList<>(queryProfilers.size());
-        for (QueryProfiler queryProfiler : queryProfilers) {
-            QueryProfileShardResult result = new QueryProfileShardResult(queryProfiler.getTree(), queryProfiler.getRewriteTime(),
-                queryProfiler.getCollector());
-            queryResults.add(result);
-        }
-        AggregationProfileShardResult aggResults = new AggregationProfileShardResult(aggProfiler.getTree());
-        return new SearchProfileQueryPhaseResult(queryResults, aggResults);
+        SearchProfileShardResult result = new SearchProfileShardResult(
+            new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult),
+            fetchResult
+        );
+        searchProfileResults.put(id, result);
     }
 }
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.search.SearchPhaseResult;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Profile results for the query phase run on all shards.
+ */
+public class SearchProfileResultsBuilder {
+    private final Map<String, SearchProfileQueryPhaseResult> queryPhaseResults;
+
+    public SearchProfileResultsBuilder(Map<String, SearchProfileQueryPhaseResult> queryPhaseResults) {
+        this.queryPhaseResults = Collections.unmodifiableMap(queryPhaseResults);
+    }
+
+    /**
+     * Merge the profiling information from some fetch results into this
+     * profiling information.
+     */
+    public SearchProfileResults build(Collection<? extends SearchPhaseResult> fetchResults) {
+        Map<String, SearchProfileShardResult> mergedShardResults = new HashMap<>(queryPhaseResults.size());
+        for (SearchPhaseResult r : fetchResults) {
+            FetchSearchResult fr = r.fetchResult();
+            String key = fr.getSearchShardTarget().toString();
+            SearchProfileQueryPhaseResult queryPhase = queryPhaseResults.get(key);
+            if (queryPhase == null) {
+                throw new IllegalStateException(
+                    "Profile returned fetch phase information for ["
+                        + key
+                        + "] but didn't return query phase information. Query phase keys were "
+                        + queryPhaseResults.keySet()
+                );
+            }
+            mergedShardResults.put(key, new SearchProfileShardResult(queryPhase, fr.profileResult()));
+        }
+        for (Map.Entry<String, SearchProfileQueryPhaseResult> e : queryPhaseResults.entrySet()) {
+            if (false == mergedShardResults.containsKey(e.getKey())) {
+                mergedShardResults.put(e.getKey(), new SearchProfileShardResult(e.getValue(), null));
+            }
+        }
+        return new SearchProfileResults(mergedShardResults);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        SearchProfileResultsBuilder other = (SearchProfileResultsBuilder) obj;
+        return queryPhaseResults.equals(other.queryPhaseResults);
+    }
+
+    @Override
+    public int hashCode() {
+        return queryPhaseResults.hashCode();
+    }
+}
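
Note: the new builder separates the two collection points in time: query-phase profiles are gathered per shard first, and fetch profiles trickle in later from whichever shards actually served hits. A hedged sketch of how it is meant to be driven during response reduction (hypothetical call site and helper; the real caller is the search phase controller):

    Map<String, SearchProfileQueryPhaseResult> queryPhaseByShard = collectQueryPhaseProfiles(); // hypothetical helper
    SearchProfileResultsBuilder builder = new SearchProfileResultsBuilder(queryPhaseByShard);
    // fetchResults may cover only a subset of shards; the rest keep a null fetch profile
    SearchProfileResults merged = builder.build(fetchResults);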
@@ -0,0 +1,98 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
+import org.elasticsearch.search.profile.query.QueryProfileShardResult;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Profile results from a particular shard for all search phases.
+ */
+public class SearchProfileShardResult implements Writeable, ToXContentFragment {
+    private final SearchProfileQueryPhaseResult queryPhase;
+
+    private final ProfileResult fetchPhase;
+
+    public SearchProfileShardResult(SearchProfileQueryPhaseResult queryPhase, @Nullable ProfileResult fetch) {
+        this.queryPhase = queryPhase;
+        this.fetchPhase = fetch;
+    }
+
+    public SearchProfileShardResult(StreamInput in) throws IOException {
+        queryPhase = new SearchProfileQueryPhaseResult(in);
+        fetchPhase = in.readOptionalWriteable(ProfileResult::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        queryPhase.writeTo(out);
+        out.writeOptionalWriteable(fetchPhase);
+    }
+
+    public SearchProfileQueryPhaseResult getQueryPhase() {
+        return queryPhase;
+    }
+
+    public ProfileResult getFetchPhase() {
+        return fetchPhase;
+    }
+
+    public List<QueryProfileShardResult> getQueryProfileResults() {
+        return queryPhase.getQueryProfileResults();
+    }
+
+    public AggregationProfileShardResult getAggregationProfileResults() {
+        return queryPhase.getAggregationProfileResults();
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startArray("searches");
+        for (QueryProfileShardResult result : queryPhase.getQueryProfileResults()) {
+            result.toXContent(builder, params);
+        }
+        builder.endArray();
+        queryPhase.getAggregationProfileResults().toXContent(builder, params);
+        if (fetchPhase != null) {
+            builder.field("fetch");
+            fetchPhase.toXContent(builder, params);
+        }
+        return builder;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        SearchProfileShardResult other = (SearchProfileShardResult) obj;
+        return queryPhase.equals(other.queryPhase) && Objects.equals(fetchPhase, other.fetchPhase);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(queryPhase, fetchPhase);
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+}
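
Note: given the toXContent above, each shard entry in the response's profile section now renders a "searches" array, the aggregation results, and, when present, a "fetch" object holding the ProfileResult tree for the fetch phase. On the client side the parsed form can be inspected like this (hedged sketch; getProfileResults(), getFetchPhase() and getTime() are used the same way by the tests further below):

    for (SearchProfileShardResult shard : searchResponse.getProfileResults().values()) {
        ProfileResult fetch = shard.getFetchPhase();   // null when no fetch profile was returned
        if (fetch != null) {
            long fetchNanos = fetch.getTime();         // total time of the "fetch" tree
        }
    }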
@@ -20,6 +20,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;

 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;


@@ -55,7 +56,6 @@ public final class AggregationProfileShardResult implements Writeable, ToXConten
         }
     }

-
     public List<ProfileResult> getProfileResults() {
         return Collections.unmodifiableList(aggProfileResults);
     }

@@ -70,6 +70,25 @@ public final class AggregationProfileShardResult implements Writeable, ToXConten
         return builder;
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        AggregationProfileShardResult other = (AggregationProfileShardResult) obj;
+        return aggProfileResults.equals(other.aggProfileResults);
+    }
+
+    @Override
+    public int hashCode() {
+        return aggProfileResults.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return Objects.toString(this);
+    }
+
     public static AggregationProfileShardResult fromXContent(XContentParser parser) throws IOException {
         XContentParser.Token token = parser.currentToken();
         ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser);
@@ -9,6 +9,7 @@
 package org.elasticsearch.search.profile.query;

 import org.elasticsearch.common.xcontent.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -21,6 +22,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;
 import java.util.concurrent.TimeUnit;

 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;

@@ -60,14 +62,14 @@ public class CollectorResult implements ToXContentObject, Writeable {
     /**
      * The total elapsed time for this Collector
      */
-    private final Long time;
+    private final long time;

     /**
      * A list of children collectors "embedded" inside this collector
     */
     private List<CollectorResult> children;

-    public CollectorResult(String collectorName, String reason, Long time, List<CollectorResult> children) {
+    public CollectorResult(String collectorName, String reason, long time, List<CollectorResult> children) {
         this.collectorName = collectorName;
         this.reason = reason;
         this.time = time;

@@ -128,6 +130,28 @@ public class CollectorResult implements ToXContentObject, Writeable {
         return children;
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        CollectorResult other = (CollectorResult) obj;
+        return collectorName.equals(other.collectorName)
+            && reason.equals(other.reason)
+            && time == other.time
+            && children.equals(other.children);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(collectorName, reason, time, children);
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
         builder = builder.startObject();
@@ -8,6 +8,7 @@

 package org.elasticsearch.search.profile.query;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -20,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;

 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;


@@ -100,6 +102,27 @@ public final class QueryProfileShardResult implements Writeable, ToXContentObjec
         return builder;
     }

+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        QueryProfileShardResult other = (QueryProfileShardResult) obj;
+        return queryProfileResults.equals(other.queryProfileResults)
+            && profileCollector.equals(other.profileCollector)
+            && rewriteTime == other.rewriteTime;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(queryProfileResults, profileCollector, rewriteTime);
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
     public static QueryProfileShardResult fromXContent(XContentParser parser) throws IOException {
         XContentParser.Token token = parser.currentToken();
         ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
@@ -43,8 +43,6 @@ import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.internal.ContextIndexSearcher;
 import org.elasticsearch.search.internal.ScrollContext;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
-import org.elasticsearch.search.profile.SearchProfileResults;
 import org.elasticsearch.search.profile.query.InternalProfileCollector;
 import org.elasticsearch.search.rescore.RescorePhase;
 import org.elasticsearch.search.sort.SortAndFormats;

@@ -128,9 +126,7 @@ public class QueryPhase {
             aggregationPhase.execute(searchContext);

             if (searchContext.getProfilers() != null) {
-                SearchProfileQueryPhaseResult shardResults = SearchProfileResults
-                        .buildShardResults(searchContext.getProfilers());
-                searchContext.queryResult().profileResults(shardResults);
+                searchContext.queryResult().profileResults(searchContext.getProfilers().buildQueryPhaseResults());
             }
         }

@@ -26,15 +26,24 @@ import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.QueryFetchSearchResult;
 import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.transport.Transport;

+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.atomic.AtomicInteger;

+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
 public class FetchSearchPhaseTests extends ESTestCase {
+    private static final long FETCH_PROFILE_TIME = 555;
+
     public void testShortcutQueryAndFetchOptimization() {
         SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());

@@ -43,6 +52,7 @@
                 new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
                 mockSearchPhaseContext.getRequest(), 1, exc -> {});
         boolean hasHits = randomBoolean();
+        boolean profiled = hasHits && randomBoolean();
         final int numHits;
         if (hasHits) {
             QuerySearchResult queryResult = new QuerySearchResult();

@@ -50,9 +60,12 @@
                 new ShardId("index", "index", 0), null, OriginalIndices.NONE));
             queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                 new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]);
+            addProfiling(profiled, queryResult);
             queryResult.size(1);
             FetchSearchResult fetchResult = new FetchSearchResult();
-            fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
+            fetchResult.setSearchShardTarget(queryResult.getSearchShardTarget());
+            SearchHits hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
+            fetchResult.shardResult(hits, fetchProfile(profiled));
             QueryFetchSearchResult fetchSearchResult = new QueryFetchSearchResult(queryResult, fetchResult);
             fetchSearchResult.setShardIndex(0);
             results.consumeResult(fetchSearchResult, () -> {});

@@ -77,9 +90,21 @@
             if (numHits != 0) {
                 assertEquals(42, searchResponse.getHits().getAt(0).docId());
             }
+            assertProfiles(profiled, 1, searchResponse);
             assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
     }

+    private void assertProfiles(boolean profiled, int totalShards, SearchResponse searchResponse) {
+        if (false == profiled) {
+            assertThat(searchResponse.getProfileResults(), equalTo(Map.of()));
+            return;
+        }
+        assertThat(searchResponse.getProfileResults().values().size(), equalTo(totalShards));
+        for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) {
+            assertThat(profileShardResult.getFetchPhase().getTime(), equalTo(FETCH_PROFILE_TIME));
+        }
+    }
+
     public void testFetchTwoDocument() {
         MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2);
         SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());

@@ -87,22 +112,26 @@
                 new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
                 mockSearchPhaseContext.getRequest(), 2, exc -> {});
         int resultSetSize = randomIntBetween(2, 10);
+        boolean profiled = randomBoolean();
+
         ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123);
-        QuerySearchResult queryResult = new QuerySearchResult(ctx1, new SearchShardTarget("node1", new ShardId("test", "na", 0),
-            null, OriginalIndices.NONE), null);
+        SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
+        QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize); // the size of the result set
         queryResult.setShardIndex(0);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});

         final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321);
-        queryResult = new QuerySearchResult(
-            ctx2, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
+        SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
+        queryResult = new QuerySearchResult(ctx2, shard2Target, null);
         queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
             new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
         queryResult.size(resultSetSize);
         queryResult.setShardIndex(1);
+        addProfiling(profiled, queryResult);
         results.consumeResult(queryResult, () -> {});

         mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {

@@ -110,14 +139,16 @@
             public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task,
                                          SearchActionListener<FetchSearchResult> listener) {
                 FetchSearchResult fetchResult = new FetchSearchResult();
+                SearchHits hits;
                 if (request.contextId().equals(ctx2)) {
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
+                    fetchResult.setSearchShardTarget(shard2Target);
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F);
                 } else {
                     assertEquals(ctx1, request.contextId());
-                    fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
-                        new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
+                    fetchResult.setSearchShardTarget(shard1Target);
+                    hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
                 }
+                fetchResult.shardResult(hits, fetchProfile(profiled));
                 listener.onResponse(fetchResult);
             }
         };

@@ -138,6 +169,7 @@
             assertEquals(42, searchResponse.getHits().getAt(1).docId());
|
||||||
assertEquals(0, searchResponse.getFailedShards());
|
assertEquals(0, searchResponse.getFailedShards());
|
||||||
assertEquals(2, searchResponse.getSuccessfulShards());
|
assertEquals(2, searchResponse.getSuccessfulShards());
|
||||||
|
assertProfiles(profiled, 2, searchResponse);
|
||||||
assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
|
assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -148,21 +180,25 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
||||||
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
||||||
int resultSetSize = randomIntBetween(2, 10);
|
int resultSetSize = randomIntBetween(2, 10);
|
||||||
|
boolean profiled = randomBoolean();
|
||||||
|
|
||||||
final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123);
|
final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123);
|
||||||
QuerySearchResult queryResult = new QuerySearchResult(ctx,
|
SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
|
QuerySearchResult queryResult = new QuerySearchResult(ctx, shard1Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize); // the size of the result set
|
queryResult.size(resultSetSize); // the size of the result set
|
||||||
queryResult.setShardIndex(0);
|
queryResult.setShardIndex(0);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
queryResult = new QuerySearchResult(new ShardSearchContextId("", 321),
|
SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
|
queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize);
|
queryResult.size(resultSetSize);
|
||||||
queryResult.setShardIndex(1);
|
queryResult.setShardIndex(1);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
||||||
|
@ -171,13 +207,17 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
SearchActionListener<FetchSearchResult> listener) {
|
SearchActionListener<FetchSearchResult> listener) {
|
||||||
if (request.contextId().getId() == 321) {
|
if (request.contextId().getId() == 321) {
|
||||||
FetchSearchResult fetchResult = new FetchSearchResult();
|
FetchSearchResult fetchResult = new FetchSearchResult();
|
||||||
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
|
fetchResult.setSearchShardTarget(shard1Target);
|
||||||
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
|
SearchHits hits = new SearchHits(
|
||||||
|
new SearchHit[] { new SearchHit(84) },
|
||||||
|
new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
|
2.0F
|
||||||
|
);
|
||||||
|
fetchResult.shardResult(hits, fetchProfile(profiled));
|
||||||
listener.onResponse(fetchResult);
|
listener.onResponse(fetchResult);
|
||||||
} else {
|
} else {
|
||||||
listener.onFailure(new MockDirectoryWrapper.FakeIOException());
|
listener.onFailure(new MockDirectoryWrapper.FakeIOException());
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext,
|
FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext,
|
||||||
|
@ -199,6 +239,21 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
assertEquals(1, searchResponse.getShardFailures().length);
|
assertEquals(1, searchResponse.getShardFailures().length);
|
||||||
assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException);
|
assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException);
|
||||||
assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
|
assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
|
||||||
|
if (profiled) {
|
||||||
|
/*
|
||||||
|
* Shard 2 failed to fetch but still searched so it will have
|
||||||
|
* profiling information for the search on both shards but only
|
||||||
|
* for the fetch on the successful shard.
|
||||||
|
*/
|
||||||
|
assertThat(searchResponse.getProfileResults().values().size(), equalTo(2));
|
||||||
|
assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetchPhase(), nullValue());
|
||||||
|
assertThat(
|
||||||
|
searchResponse.getProfileResults().get(shard2Target.toString()).getFetchPhase().getTime(),
|
||||||
|
equalTo(FETCH_PROFILE_TIME)
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
assertThat(searchResponse.getProfileResults(), equalTo(Map.of()));
|
||||||
|
}
|
||||||
assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx));
|
assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -206,18 +261,22 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
int resultSetSize = randomIntBetween(0, 100);
|
int resultSetSize = randomIntBetween(0, 100);
|
||||||
// we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert...
|
// we use at least 2 hits otherwise this is subject to single shard optimization and we trip an assert...
|
||||||
int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard
|
int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard
|
||||||
|
boolean profiled = randomBoolean();
|
||||||
|
|
||||||
SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());
|
SearchPhaseController controller = new SearchPhaseController(s -> InternalAggregationTestCase.emptyReduceContextBuilder());
|
||||||
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits);
|
MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits);
|
||||||
QueryPhaseResultConsumer results = controller.newSearchPhaseResults(EsExecutors.DIRECT_EXECUTOR_SERVICE,
|
QueryPhaseResultConsumer results = controller.newSearchPhaseResults(EsExecutors.DIRECT_EXECUTOR_SERVICE,
|
||||||
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
||||||
mockSearchPhaseContext.getRequest(), numHits, exc -> {});
|
mockSearchPhaseContext.getRequest(), numHits, exc -> {});
|
||||||
|
SearchShardTarget[] shardTargets = new SearchShardTarget[numHits];
|
||||||
for (int i = 0; i < numHits; i++) {
|
for (int i = 0; i < numHits; i++) {
|
||||||
QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i),
|
shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
|
QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", i), shardTargets[i], null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize); // the size of the result set
|
queryResult.size(resultSetSize); // the size of the result set
|
||||||
queryResult.setShardIndex(i);
|
queryResult.setShardIndex(i);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
}
|
}
|
||||||
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
||||||
|
@ -226,8 +285,13 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
SearchActionListener<FetchSearchResult> listener) {
|
SearchActionListener<FetchSearchResult> listener) {
|
||||||
new Thread(() -> {
|
new Thread(() -> {
|
||||||
FetchSearchResult fetchResult = new FetchSearchResult();
|
FetchSearchResult fetchResult = new FetchSearchResult();
|
||||||
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.contextId().getId()+1))},
|
fetchResult.setSearchShardTarget(shardTargets[(int) request.contextId().getId()]);
|
||||||
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 100F));
|
SearchHits hits = new SearchHits(
|
||||||
|
new SearchHit[] { new SearchHit((int) (request.contextId().getId() + 1)) },
|
||||||
|
new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
|
100F
|
||||||
|
);
|
||||||
|
fetchResult.shardResult(hits, fetchProfile(profiled));
|
||||||
listener.onResponse(fetchResult);
|
listener.onResponse(fetchResult);
|
||||||
}).start();
|
}).start();
|
||||||
}
|
}
|
||||||
|
@ -257,6 +321,19 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
assertEquals(0, searchResponse.getFailedShards());
|
assertEquals(0, searchResponse.getFailedShards());
|
||||||
assertEquals(numHits, searchResponse.getSuccessfulShards());
|
assertEquals(numHits, searchResponse.getSuccessfulShards());
|
||||||
|
if (profiled) {
|
||||||
|
assertThat(searchResponse.getProfileResults().values().size(), equalTo(numHits));
|
||||||
|
int count = 0;
|
||||||
|
for (SearchProfileShardResult profileShardResult : searchResponse.getProfileResults().values()) {
|
||||||
|
if (profileShardResult.getFetchPhase() != null) {
|
||||||
|
count++;
|
||||||
|
assertThat(profileShardResult.getFetchPhase().getTime(), equalTo(FETCH_PROFILE_TIME));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assertThat(count, equalTo(Math.min(numHits, resultSetSize)));
|
||||||
|
} else {
|
||||||
|
assertThat(searchResponse.getProfileResults(), equalTo(Map.of()));
|
||||||
|
}
|
||||||
int sizeReleasedContexts = Math.max(0, numHits - resultSetSize); // all non fetched results will be freed
|
int sizeReleasedContexts = Math.max(0, numHits - resultSetSize); // all non fetched results will be freed
|
||||||
assertEquals(mockSearchPhaseContext.releasedSearchContexts.toString(),
|
assertEquals(mockSearchPhaseContext.releasedSearchContexts.toString(),
|
||||||
sizeReleasedContexts, mockSearchPhaseContext.releasedSearchContexts.size());
|
sizeReleasedContexts, mockSearchPhaseContext.releasedSearchContexts.size());
|
||||||
|
@ -270,22 +347,26 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
||||||
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
||||||
int resultSetSize = randomIntBetween(2, 10);
|
int resultSetSize = randomIntBetween(2, 10);
|
||||||
QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123),
|
boolean profiled = randomBoolean();
|
||||||
new SearchShardTarget("node1", new ShardId("test", "na", 0),
|
|
||||||
null, OriginalIndices.NONE), null);
|
SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
|
||||||
|
QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), shard1Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize); // the size of the result set
|
queryResult.size(resultSetSize); // the size of the result set
|
||||||
queryResult.setShardIndex(0);
|
queryResult.setShardIndex(0);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
queryResult = new QuerySearchResult(new ShardSearchContextId("", 321),
|
SearchShardTarget shard2Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
|
queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), shard2Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize);
|
queryResult.size(resultSetSize);
|
||||||
queryResult.setShardIndex(1);
|
queryResult.setShardIndex(1);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
AtomicInteger numFetches = new AtomicInteger(0);
|
AtomicInteger numFetches = new AtomicInteger(0);
|
||||||
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
||||||
@Override
|
@Override
|
||||||
|
@ -295,14 +376,16 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
if (numFetches.incrementAndGet() == 1) {
|
if (numFetches.incrementAndGet() == 1) {
|
||||||
throw new RuntimeException("BOOM");
|
throw new RuntimeException("BOOM");
|
||||||
}
|
}
|
||||||
|
SearchHits hits;
|
||||||
if (request.contextId().getId() == 321) {
|
if (request.contextId().getId() == 321) {
|
||||||
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
|
fetchResult.setSearchShardTarget(shard2Target);
|
||||||
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
|
hits = new SearchHits(new SearchHit[] { new SearchHit(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F);
|
||||||
} else {
|
} else {
|
||||||
|
fetchResult.setSearchShardTarget(shard1Target);
|
||||||
assertEquals(request, 123);
|
assertEquals(request, 123);
|
||||||
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)},
|
hits = new SearchHits(new SearchHit[] { new SearchHit(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
|
||||||
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F));
|
|
||||||
}
|
}
|
||||||
|
fetchResult.shardResult(hits, fetchProfile(profiled));
|
||||||
listener.onResponse(fetchResult);
|
listener.onResponse(fetchResult);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -328,22 +411,26 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
new NoopCircuitBreaker(CircuitBreaker.REQUEST), SearchProgressListener.NOOP,
|
||||||
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
mockSearchPhaseContext.getRequest(), 2, exc -> {});
|
||||||
int resultSetSize = 1;
|
int resultSetSize = 1;
|
||||||
|
boolean profiled = randomBoolean();
|
||||||
|
|
||||||
final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123);
|
final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123);
|
||||||
QuerySearchResult queryResult = new QuerySearchResult(ctx1,
|
SearchShardTarget shard1Target = new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null);
|
QuerySearchResult queryResult = new QuerySearchResult(ctx1, shard1Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize); // the size of the result set
|
queryResult.size(resultSetSize); // the size of the result set
|
||||||
queryResult.setShardIndex(0);
|
queryResult.setShardIndex(0);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321);
|
final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321);
|
||||||
queryResult = new QuerySearchResult(ctx2,
|
SearchShardTarget shard2Target = new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE);
|
||||||
new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null);
|
queryResult = new QuerySearchResult(ctx2, shard2Target, null);
|
||||||
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]);
|
||||||
queryResult.size(resultSetSize);
|
queryResult.size(resultSetSize);
|
||||||
queryResult.setShardIndex(1);
|
queryResult.setShardIndex(1);
|
||||||
|
addProfiling(profiled, queryResult);
|
||||||
results.consumeResult(queryResult, () -> {});
|
results.consumeResult(queryResult, () -> {});
|
||||||
|
|
||||||
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
mockSearchPhaseContext.searchTransport = new SearchTransportService(null, null, null) {
|
||||||
|
@ -352,8 +439,13 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
SearchActionListener<FetchSearchResult> listener) {
|
SearchActionListener<FetchSearchResult> listener) {
|
||||||
FetchSearchResult fetchResult = new FetchSearchResult();
|
FetchSearchResult fetchResult = new FetchSearchResult();
|
||||||
if (request.contextId().getId() == 321) {
|
if (request.contextId().getId() == 321) {
|
||||||
fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)},
|
fetchResult.setSearchShardTarget(shard1Target);
|
||||||
new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F));
|
SearchHits hits = new SearchHits(
|
||||||
|
new SearchHit[] { new SearchHit(84) },
|
||||||
|
new TotalHits(1, TotalHits.Relation.EQUAL_TO),
|
||||||
|
2.0F
|
||||||
|
);
|
||||||
|
fetchResult.shardResult(hits, fetchProfile(profiled));
|
||||||
} else {
|
} else {
|
||||||
fail("requestID 123 should not be fetched but was");
|
fail("requestID 123 should not be fetched but was");
|
||||||
}
|
}
|
||||||
|
@ -377,7 +469,25 @@ public class FetchSearchPhaseTests extends ESTestCase {
|
||||||
assertEquals(84, searchResponse.getHits().getAt(0).docId());
|
assertEquals(84, searchResponse.getHits().getAt(0).docId());
|
||||||
assertEquals(0, searchResponse.getFailedShards());
|
assertEquals(0, searchResponse.getFailedShards());
|
||||||
assertEquals(2, searchResponse.getSuccessfulShards());
|
assertEquals(2, searchResponse.getSuccessfulShards());
|
||||||
|
if (profiled) {
|
||||||
|
assertThat(searchResponse.getProfileResults().size(), equalTo(2));
|
||||||
|
assertThat(searchResponse.getProfileResults().get(shard1Target.toString()).getFetchPhase(), nullValue());
|
||||||
|
assertThat(
|
||||||
|
searchResponse.getProfileResults().get(shard2Target.toString()).getFetchPhase().getTime(),
|
||||||
|
equalTo(FETCH_PROFILE_TIME)
|
||||||
|
);
|
||||||
|
}
|
||||||
assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
|
assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size());
|
||||||
assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1));
|
assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void addProfiling(boolean profiled, QuerySearchResult queryResult) {
|
||||||
|
if (profiled) {
|
||||||
|
queryResult.profileResults(new SearchProfileQueryPhaseResult(List.of(), null));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private ProfileResult fetchProfile(boolean profiled) {
|
||||||
|
return profiled ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), FETCH_PROFILE_TIME, List.of()) : null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
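The assertions above read the new per-shard fetch profile through SearchResponse#getProfileResults(). As a hedged sketch of what a consumer of that map might look like, assuming only the getters the assertions themselves call (the class below and its print method are illustrative, not part of the commit):

import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.SearchProfileShardResult;

public class FetchProfilePrinter {
    // Prints per-shard fetch timings. A shard that searched but never fetched
    // reports a null fetch phase, which is exactly what the tests assert.
    public static void print(SearchResponse response) {
        for (Map.Entry<String, SearchProfileShardResult> entry : response.getProfileResults().entrySet()) {
            ProfileResult fetch = entry.getValue().getFetchPhase();
            System.out.println(entry.getKey() + ": " + (fetch == null ? "no fetch" : fetch.getTime() + " ns"));
        }
    }
}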
@@ -52,6 +52,9 @@ import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchContextId;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.suggest.SortBy;
 import org.elasticsearch.search.suggest.Suggest;
@@ -82,10 +85,16 @@ import java.util.stream.Collectors;
 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
 import static java.util.Collections.singletonList;
+import static java.util.stream.Collectors.toList;
+import static org.hamcrest.Matchers.anEmptyMap;
+import static org.hamcrest.Matchers.both;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;

@@ -136,7 +145,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
         }
         int nShards = randomIntBetween(1, 20);
         int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
-        AtomicArray<SearchPhaseResult> results = generateQueryResults(nShards, suggestions, queryResultSize, false);
+        AtomicArray<SearchPhaseResult> results = generateQueryResults(nShards, suggestions, queryResultSize, false, false);
         Optional<SearchPhaseResult> first = results.asList().stream().findFirst();
         int from = 0, size = 0;
         if (first.isPresent()) {
@@ -208,7 +217,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
                                                                  List<CompletionSuggestion> suggestions,
                                                                  int searchHitsSize, boolean useConstantScore) throws Exception {
         return RandomizedContext.current().runWithPrivateRandomness(seed,
-            () -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore));
+            () -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore, false));
     }

     public void testMerge() {
@@ -221,13 +230,26 @@ public class SearchPhaseControllerTests extends ESTestCase {
         }
         int nShards = randomIntBetween(1, 20);
         int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
-        AtomicArray<SearchPhaseResult> queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false);
+        boolean profile = randomBoolean();
         for (int trackTotalHits : new int[] { SearchContext.TRACK_TOTAL_HITS_DISABLED, SearchContext.TRACK_TOTAL_HITS_ACCURATE }) {
-            SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(queryResults.asList(),
-                new ArrayList<>(), new ArrayList<>(), new SearchPhaseController.TopDocsStats(trackTotalHits),
-                0, true, InternalAggregationTestCase.emptyReduceContextBuilder(), true);
-            AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults(nShards,
-                reducedQueryPhase.sortedTopDocs.scoreDocs, reducedQueryPhase.suggest);
+            AtomicArray<SearchPhaseResult> queryResults = generateQueryResults(nShards, suggestions, queryResultSize, false, profile);
+            SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedQueryPhase(
+                queryResults.asList(),
+                new ArrayList<>(),
+                new ArrayList<>(),
+                new SearchPhaseController.TopDocsStats(trackTotalHits),
+                0,
+                true,
+                InternalAggregationTestCase.emptyReduceContextBuilder(),
+                true
+            );
+            List<SearchShardTarget> shards = queryResults.asList().stream().map(SearchPhaseResult::getSearchShardTarget).collect(toList());
+            AtomicArray<SearchPhaseResult> fetchResults = generateFetchResults(
+                shards,
+                reducedQueryPhase.sortedTopDocs.scoreDocs,
+                reducedQueryPhase.suggest,
+                profile
+            );
             InternalSearchResponse mergedResponse = searchPhaseController.merge(false,
                 reducedQueryPhase, fetchResults.asList(), fetchResults::get);
             if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
@@ -261,6 +283,21 @@ public class SearchPhaseControllerTests extends ESTestCase {
                     }
                 }
             }
+            if (profile) {
+                assertThat(mergedResponse.profile().entrySet(), hasSize(nShards));
+                assertThat( // All shards should have a query profile
+                    mergedResponse.profile().toString(),
+                    mergedResponse.profile().values().stream().filter(r -> r.getQueryProfileResults() != null).count(),
+                    equalTo((long) nShards)
+                );
+                assertThat( // Some or all shards should have a fetch profile
+                    mergedResponse.profile().toString(),
+                    mergedResponse.profile().values().stream().filter(r -> r.getFetchPhase() != null).count(),
+                    both(greaterThan(0L)).and(lessThanOrEqualTo((long) nShards))
+                );
+            } else {
+                assertThat(mergedResponse.profile(), is(anEmptyMap()));
+            }
         }
     }

@@ -271,8 +308,13 @@ public class SearchPhaseControllerTests extends ESTestCase {
      * {@link SearchPhaseController#reducedQueryPhase} does,
      * meaning that the returned query results can be fed directly to {@link SearchPhaseController#sortDocs}
      */
-    private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards, List<CompletionSuggestion> suggestions,
-                                                                       int searchHitsSize, boolean useConstantScore) {
+    private static AtomicArray<SearchPhaseResult> generateQueryResults(
+        int nShards,
+        List<CompletionSuggestion> suggestions,
+        int searchHitsSize,
+        boolean useConstantScore,
+        boolean profile
+    ) {
         AtomicArray<SearchPhaseResult> queryResults = new AtomicArray<>(nShards);
         for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
             String clusterAlias = randomBoolean() ? null : "remote";
@@ -316,6 +358,11 @@ public class SearchPhaseControllerTests extends ESTestCase {
             querySearchResult.size(searchHitsSize);
             querySearchResult.suggest(new Suggest(new ArrayList<>(shardSuggestion)));
             querySearchResult.setShardIndex(shardIndex);
+            if (profile) {
+                querySearchResult.profileResults(
+                    new SearchProfileQueryPhaseResult(List.of(), new AggregationProfileShardResult(List.of()))
+                );
+            }
             queryResults.set(shardIndex, querySearchResult);
         }
         return queryResults;
@@ -344,12 +391,16 @@ public class SearchPhaseControllerTests extends ESTestCase {
         return groupedSuggestion.values().stream().map(completionSuggestion::reduce).collect(Collectors.toList());
     }

-    private static AtomicArray<SearchPhaseResult> generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) {
-        AtomicArray<SearchPhaseResult> fetchResults = new AtomicArray<>(nShards);
-        for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
+    private static AtomicArray<SearchPhaseResult> generateFetchResults(
+        List<SearchShardTarget> shards,
+        ScoreDoc[] mergedSearchDocs,
+        Suggest mergedSuggest,
+        boolean profile
+    ) {
+        AtomicArray<SearchPhaseResult> fetchResults = new AtomicArray<>(shards.size());
+        for (int shardIndex = 0; shardIndex < shards.size(); shardIndex++) {
             float maxScore = -1F;
-            String clusterAlias = randomBoolean() ? null : "remote";
-            SearchShardTarget shardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), clusterAlias, OriginalIndices.NONE);
+            SearchShardTarget shardTarget = shards.get(shardIndex);
             FetchSearchResult fetchSearchResult = new FetchSearchResult(new ShardSearchContextId("", shardIndex), shardTarget);
             List<SearchHit> searchHits = new ArrayList<>();
             for (ScoreDoc scoreDoc : mergedSearchDocs) {
@@ -374,7 +425,10 @@ public class SearchPhaseControllerTests extends ESTestCase {
                 }
             }
             SearchHit[] hits = searchHits.toArray(new SearchHit[0]);
-            fetchSearchResult.hits(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore));
+            ProfileResult profileResult = profile && searchHits.size() > 0
+                ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), randomNonNegativeLong(), List.of())
+                : null;
+            fetchSearchResult.shardResult(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), profileResult);
             fetchResults.set(shardIndex, fetchSearchResult);
         }
         return fetchResults;
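Restated as a hedged sketch, the invariant the new testMerge assertions encode, using only the SearchProfileShardResult getters the assertions call (the helper class itself is illustrative):

import java.util.Map;

import org.elasticsearch.search.profile.SearchProfileShardResult;

public class MergedProfileInvariant {
    // After the merge, every shard carries a query profile, while only shards
    // that actually fetched hits carry a fetch profile: at least one of them,
    // at most all nShards.
    public static boolean holds(Map<String, SearchProfileShardResult> profile, int nShards) {
        long queryProfiles = profile.values().stream().filter(r -> r.getQueryProfileResults() != null).count();
        long fetchProfiles = profile.values().stream().filter(r -> r.getFetchPhase() != null).count();
        return profile.size() == nShards && queryProfiles == nShards && fetchProfiles > 0 && fetchProfiles <= nShards;
    }
}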
@@ -13,8 +13,8 @@ import org.apache.lucene.search.TotalHits;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider;
-import org.elasticsearch.core.Tuple;
 import org.elasticsearch.common.text.Text;
+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.DocValueFormat;
@@ -28,9 +28,9 @@ import org.elasticsearch.search.aggregations.metrics.InternalMax;
 import org.elasticsearch.search.aggregations.metrics.Max;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
 import org.elasticsearch.search.profile.SearchProfileResults;
-import org.elasticsearch.search.profile.SearchProfileShardResultsTests;
+import org.elasticsearch.search.profile.SearchProfileResultsTests;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
 import org.elasticsearch.test.ESTestCase;
@@ -212,9 +212,9 @@ public class SearchResponseMergerTests extends ESTestCase {
         SearchTimeProvider searchTimeProvider = new SearchTimeProvider(0, 0, () -> 0);
         SearchResponseMerger merger = new SearchResponseMerger(0, 0, SearchContext.TRACK_TOTAL_HITS_ACCURATE,
             searchTimeProvider, emptyReduceContextBuilder());
-        Map<String, SearchProfileQueryPhaseResult> expectedProfile = new HashMap<>();
+        Map<String, SearchProfileShardResult> expectedProfile = new HashMap<>();
         for (int i = 0; i < numResponses; i++) {
-            SearchProfileResults profile = SearchProfileShardResultsTests.createTestItem();
+            SearchProfileResults profile = SearchProfileResultsTests.createTestItem();
             expectedProfile.putAll(profile.getShardResults());
             SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1);
@@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.AggregationsTests;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.profile.SearchProfileResults;
-import org.elasticsearch.search.profile.SearchProfileShardResultsTests;
+import org.elasticsearch.search.profile.SearchProfileResultsTests;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.SuggestTests;
 import org.elasticsearch.test.ESTestCase;
@@ -104,9 +104,16 @@ public class SearchResponseTests extends ESTestCase {
             SearchHits hits = SearchHitsTests.createTestItem(true, true);
             InternalAggregations aggregations = aggregationsTests.createTestInstance();
             Suggest suggest = SuggestTests.createTestItem();
-            SearchProfileResults profileShardResults = SearchProfileShardResultsTests.createTestItem();
-            internalSearchResponse = new InternalSearchResponse(hits, aggregations, suggest, profileShardResults,
-                timedOut, terminatedEarly, numReducePhases);
+            SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem();
+            internalSearchResponse = new InternalSearchResponse(
+                hits,
+                aggregations,
+                suggest,
+                profileResults,
+                timedOut,
+                terminatedEarly,
+                numReducePhases
+            );
         } else {
             internalSearchResponse = InternalSearchResponse.empty();
         }
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.fetch;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Set;
+
+import static io.github.nik9000.mapmatcher.MapMatcher.assertMap;
+import static io.github.nik9000.mapmatcher.MapMatcher.matchesMap;
+import static org.hamcrest.Matchers.equalTo;
+
+public class FetchProfilerTests extends ESTestCase {
+    public void testTime() {
+        long startTime = randomLongBetween(0, Long.MAX_VALUE / 2);
+        FetchProfiler profiler = new FetchProfiler(startTime);
+        long elapsed = randomLongBetween(0, Long.MAX_VALUE / 2);
+        ProfileResult result = profiler.finish(startTime + elapsed);
+        assertThat(result.getTime(), equalTo(elapsed));
+    }
+
+    public void testStoredFieldsIsOrdered() throws IOException {
+        FetchProfiler profiler = new FetchProfiler();
+        profiler.visitor(new CustomFieldsVisitor(Set.of(), true));
+        ProfileResult result = profiler.finish();
+        assertMap(result.getDebugInfo(), matchesMap().entry("stored_fields", List.of("_id", "_routing", "_source")));
+        // Make sure that serialization preserves the order
+        ProfileResult copy = copyWriteable(result, new NamedWriteableRegistry(List.of()), ProfileResult::new);
+        assertMap(copy.getDebugInfo(), matchesMap().entry("stored_fields", List.of("_id", "_routing", "_source")));
+    }
+}
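A hedged sketch of driving FetchProfiler end to end the way the tests above do; the constructors and the visitor/finish calls are taken from the test, while the surrounding method is illustrative and assumes a nanosecond clock:

import java.util.Set;

import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.search.fetch.FetchProfiler;
import org.elasticsearch.search.profile.ProfileResult;

public class FetchProfilerSketch {
    public static ProfileResult timeOneFetch() {
        long start = System.nanoTime();
        FetchProfiler profiler = new FetchProfiler(start);
        // Wrapping the stored-fields visitor records which fields get loaded;
        // they surface under the "stored_fields" key of the debug info.
        profiler.visitor(new CustomFieldsVisitor(Set.of(), true));
        // ... stored-field loading and fetch sub-phases would run here ...
        ProfileResult result = profiler.finish(System.nanoTime());
        System.out.println(result.getTime() + " ns, loaded " + result.getDebugInfo().get("stored_fields"));
        return result;
    }
}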
@@ -9,13 +9,12 @@
 package org.elasticsearch.search.profile;

 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -25,12 +24,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.function.Predicate;

-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-
-public class ProfileResultTests extends ESTestCase {
+public class ProfileResultTests extends AbstractSerializingTestCase<ProfileResult> {
+    public static final Predicate<String> RANDOM_FIELDS_EXCLUDE_FILTER = s -> s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
+        || s.endsWith(ProfileResult.DEBUG.getPreferredName());

     public static ProfileResult createTestItem(int depth) {
         String type = randomAlphaOfLengthBetween(5, 10);
@@ -58,41 +54,24 @@ public class ProfileResultTests extends ESTestCase {
         return new ProfileResult(type, description, breakdown, debug, randomNonNegativeLong(), children);
     }

-    public void testFromXContent() throws IOException {
-        doFromXContentTestWithRandomFields(false);
+    @Override
+    protected ProfileResult createTestInstance() {
+        return createTestItem(2);
     }

-    /**
-     * This test adds random fields and objects to the xContent rendered out to ensure we can parse it
-     * back to be forward compatible with additions to the xContent
-     */
-    public void testFromXContentWithRandomFields() throws IOException {
-        doFromXContentTestWithRandomFields(true);
+    @Override
+    protected Reader<ProfileResult> instanceReader() {
+        return ProfileResult::new;
     }

-    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-        ProfileResult profileResult = createTestItem(2);
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-        BytesReference mutated;
-        if (addRandomFields) {
-            // "breakdown" and "debug" just consists of key/value pairs, we shouldn't add anything random there
-            Predicate<String> excludeFilter = (s) ->
-                s.endsWith(ProfileResult.BREAKDOWN.getPreferredName()) || s.endsWith(ProfileResult.DEBUG.getPreferredName());
-            mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
-        } else {
-            mutated = originalBytes;
-        }
-        ProfileResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = ProfileResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertNull(parser.nextToken());
-        }
-        assertEquals(profileResult.getTime(), parsed.getTime());
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
+    @Override
+    protected ProfileResult doParseInstance(XContentParser parser) throws IOException {
+        return ProfileResult.fromXContent(parser);
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return RANDOM_FIELDS_EXCLUDE_FILTER;
     }

     public void testToXContent() throws IOException {
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
+import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResultTests;
+import org.elasticsearch.search.profile.query.QueryProfileShardResult;
+import org.elasticsearch.search.profile.query.QueryProfileShardResultTests;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SearchProfileQueryPhaseResultTests extends AbstractWireSerializingTestCase<SearchProfileQueryPhaseResult> {
+    static SearchProfileQueryPhaseResult createTestItem() {
+        List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
+        int queryItems = rarely() ? 0 : randomIntBetween(1, 2);
+        for (int q = 0; q < queryItems; q++) {
+            queryProfileResults.add(QueryProfileShardResultTests.createTestItem());
+        }
+        AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1);
+        return new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult);
+    }
+
+    @Override
+    protected SearchProfileQueryPhaseResult createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileQueryPhaseResult> instanceReader() {
+        return SearchProfileQueryPhaseResult::new;
+    }
+}
@@ -0,0 +1,95 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchShardTarget;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.matchesPattern;
+
+public class SearchProfileResultsBuilderTests extends ESTestCase {
+    public void testFetchWithoutQuery() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        FetchSearchResult fetchPhase = fetchResult(
+            randomValueOtherThanMany(searchPhase::containsKey, SearchProfileResultsBuilderTests::randomTarget),
+            null
+        );
+        Exception e = expectThrows(IllegalStateException.class, () -> builder(searchPhase).build(List.of(fetchPhase)));
+        assertThat(
+            e.getMessage(),
+            matchesPattern(
+                "Profile returned fetch phase information for .+ but didn't return query phase information\. Query phase keys were .+"
+            )
+        );
+    }
+
+    public void testQueryWithoutAnyFetch() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        FetchSearchResult fetchPhase = fetchResult(searchPhase.keySet().iterator().next(), null);
+        SearchProfileResults result = builder(searchPhase).build(List.of(fetchPhase));
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getQueryPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+        assertThat(result.getShardResults().values().stream().filter(r -> r.getFetchPhase() != null).count(), equalTo(0L));
+    }
+
+    public void testQueryAndFetch() {
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase = randomSearchPhaseResults();
+        List<FetchSearchResult> fetchPhase = searchPhase.entrySet()
+            .stream()
+            .map(e -> fetchResult(e.getKey(), new ProfileResult("fetch", "", Map.of(), Map.of(), 1, List.of())))
+            .collect(toList());
+        SearchProfileResults result = builder(searchPhase).build(fetchPhase);
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getQueryPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+        assertThat(
+            result.getShardResults().values().stream().filter(r -> r.getFetchPhase() != null).count(),
+            equalTo((long) searchPhase.size())
+        );
+    }
+
+    private static Map<SearchShardTarget, SearchProfileQueryPhaseResult> randomSearchPhaseResults() {
+        int size = rarely() ? 0 : randomIntBetween(1, 2);
+        Map<SearchShardTarget, SearchProfileQueryPhaseResult> results = new HashMap<>(size);
+        while (results.size() < size) {
+            results.put(randomTarget(), SearchProfileQueryPhaseResultTests.createTestItem());
+        }
+        return results;
+    }
+
+    private static SearchProfileResultsBuilder builder(Map<SearchShardTarget, SearchProfileQueryPhaseResult> searchPhase) {
+        return new SearchProfileResultsBuilder(
+            searchPhase.entrySet().stream().collect(toMap(e -> e.getKey().toString(), Map.Entry::getValue))
+        );
+    }
+
+    private static FetchSearchResult fetchResult(SearchShardTarget target, ProfileResult profileResult) {
+        FetchSearchResult fetchResult = new FetchSearchResult();
+        fetchResult.shardResult(SearchHits.empty(), profileResult);
+        fetchResult.setSearchShardTarget(target);
+        return fetchResult;
+    }
+
+    private static SearchShardTarget randomTarget() {
+        return new SearchShardTarget(randomAlphaOfLength(5), new ShardId(randomAlphaOfLength(5), "uuid", randomInt(6)), null, null);
+    }
+}
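Note: the three tests above pin down SearchProfileResultsBuilder's merge contract: a fetch-phase profile must be matched by a query-phase profile from the same shard, while a query-phase profile may stand alone (not every shard fetches hits). A minimal, self-contained sketch of that contract, with the profile payloads simplified to strings and all names hypothetical (the real builder works with SearchProfileQueryPhaseResult and ProfileResult):

    import java.util.HashMap;
    import java.util.Map;

    class ProfileMergeSketch {
        // Pairs each shard's query-phase profile with its optional fetch-phase
        // profile; a fetch profile for a shard that never reported a query
        // profile is the error testFetchWithoutQuery() expects.
        static Map<String, String> merge(Map<String, String> queryByShard, Map<String, String> fetchByShard) {
            for (String shard : fetchByShard.keySet()) {
                if (queryByShard.containsKey(shard) == false) {
                    throw new IllegalStateException(
                        "Profile returned fetch phase information for " + shard
                            + " but didn't return query phase information. Query phase keys were " + queryByShard.keySet()
                    );
                }
            }
            Map<String, String> merged = new HashMap<>();
            // Shards with no fetch profile keep a null fetch half, mirroring the
            // null ProfileResult that testQueryWithoutAnyFetch() passes through.
            queryByShard.forEach((shard, query) -> merged.put(shard, query + "|" + fetchByShard.get(shard)));
            return merged;
        }
    }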
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Predicate;
+
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
+
+public class SearchProfileResultsTests extends AbstractSerializingTestCase<SearchProfileResults> {
+    public static SearchProfileResults createTestItem() {
+        int size = rarely() ? 0 : randomIntBetween(1, 2);
+        Map<String, SearchProfileShardResult> shards = new HashMap<>(size);
+        for (int i = 0; i < size; i++) {
+            SearchProfileQueryPhaseResult searchResult = SearchProfileQueryPhaseResultTests.createTestItem();
+            ProfileResult fetchResult = randomBoolean() ? null : ProfileResultTests.createTestItem(2);
+            shards.put(
+                randomAlphaOfLengthBetween(5, 10),
+                new SearchProfileShardResult(searchResult, fetchResult)
+            );
+        }
+        return new SearchProfileResults(shards);
+    }
+
+    @Override
+    protected SearchProfileResults createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileResults> instanceReader() {
+        return SearchProfileResults::new;
+    }
+
+    @Override
+    protected SearchProfileResults doParseInstance(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD);
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        SearchProfileResults result = SearchProfileResults.fromXContent(parser);
+        assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
+        assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+        return result;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER;
+    }
+}
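Note: doParseInstance() above has to step over the response wrapper before delegating to SearchProfileResults.fromXContent, because in a search response the profile section is nested rather than top level. The shape it navigates looks roughly like this (illustrative only, entries elided):

    {
      "profile": {
        "shards": [
          ... one object per shard: query phase, aggregations, and now an optional fetch phase ...
        ]
      }
    }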
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.profile;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+public class SearchProfileShardResultTests extends AbstractWireSerializingTestCase<SearchProfileShardResult> {
+    static SearchProfileShardResult createTestItem() {
+        SearchProfileQueryPhaseResult searchResult = SearchProfileQueryPhaseResultTests.createTestItem();
+        ProfileResult fetchResult = randomBoolean() ? null : ProfileResultTests.createTestItem(2);
+        return new SearchProfileShardResult(searchResult, fetchResult);
+    }
+
+    @Override
+    protected SearchProfileShardResult createTestInstance() {
+        return createTestItem();
+    }
+
+    @Override
+    protected Reader<SearchProfileShardResult> instanceReader() {
+        return SearchProfileShardResult::new;
+    }
+}
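Note: SearchProfileShardResult is only ever parsed as part of SearchProfileResults, so this test extends AbstractWireSerializingTestCase and checks the wire round trip alone. That round trip amounts to roughly the following (a sketch assuming Elasticsearch's BytesStreamOutput test idiom; not code from this diff):

    BytesStreamOutput out = new BytesStreamOutput();
    original.writeTo(out);                    // serialize
    SearchProfileShardResult copy =           // deserialize via instanceReader()
        new SearchProfileShardResult(out.bytes().streamInput());
    assertThat(copy, equalTo(original));      // equals and hashCode must both survive the trip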
@@ -1,96 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.search.profile;
-
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResult;
-import org.elasticsearch.search.profile.aggregation.AggregationProfileShardResultTests;
-import org.elasticsearch.search.profile.query.QueryProfileShardResult;
-import org.elasticsearch.search.profile.query.QueryProfileShardResultTests;
-import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Predicate;
-
-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
-
-public class SearchProfileShardResultsTests extends ESTestCase {
-
-    public static SearchProfileResults createTestItem() {
-        int size = rarely() ? 0 : randomIntBetween(1, 2);
-        Map<String, SearchProfileQueryPhaseResult> searchProfileResults = new HashMap<>(size);
-        for (int i = 0; i < size; i++) {
-            List<QueryProfileShardResult> queryProfileResults = new ArrayList<>();
-            int queryItems = rarely() ? 0 : randomIntBetween(1, 2);
-            for (int q = 0; q < queryItems; q++) {
-                queryProfileResults.add(QueryProfileShardResultTests.createTestItem());
-            }
-            AggregationProfileShardResult aggProfileShardResult = AggregationProfileShardResultTests.createTestItem(1);
-            searchProfileResults.put(
-                randomAlphaOfLengthBetween(5, 10),
-                new SearchProfileQueryPhaseResult(queryProfileResults, aggProfileShardResult)
-            );
-        }
-        return new SearchProfileResults(searchProfileResults);
-    }
-
-    public void testFromXContent() throws IOException {
-        doFromXContentTestWithRandomFields(false);
-    }
-
-    /**
-     * This test adds random fields and objects to the xContent rendered out to ensure we can parse it
-     * back to be forward compatible with additions to the xContent
-     */
-    public void testFromXContentWithRandomFields() throws IOException {
-        doFromXContentTestWithRandomFields(true);
-    }
-
-    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-        SearchProfileResults shardResult = createTestItem();
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(shardResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-        BytesReference mutated;
-        if (addRandomFields) {
-            // The ProfileResults "breakdown" section just consists of key/value pairs, we shouldn't add anything random there
-            // also we don't want to insert into the root object here, its just the PROFILE_FIELD itself
-            Predicate<String> excludeFilter = (s) -> s.isEmpty()
-                || s.endsWith(ProfileResult.BREAKDOWN.getPreferredName())
-                || s.endsWith(ProfileResult.DEBUG.getPreferredName());
-            mutated = insertRandomFields(xContentType, originalBytes, excludeFilter, random());
-        } else {
-            mutated = originalBytes;
-        }
-        SearchProfileResults parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            ensureFieldName(parser, parser.nextToken(), SearchProfileResults.PROFILE_FIELD);
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = SearchProfileResults.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
-            assertNull(parser.nextToken());
-        }
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
-
-    }
-
-}
@@ -9,13 +9,13 @@
 package org.elasticsearch.search.profile.aggregation;
 
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParserUtils;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.profile.ProfileResultTests;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -23,11 +23,11 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.function.Predicate;
 
 import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
-public class AggregationProfileShardResultTests extends ESTestCase {
+public class AggregationProfileShardResultTests extends AbstractSerializingTestCase<AggregationProfileShardResult> {
 
     public static AggregationProfileShardResult createTestItem(int depth) {
         int size = randomIntBetween(0, 5);
@@ -38,22 +38,30 @@ public class AggregationProfileShardResultTests extends ESTestCase {
         return new AggregationProfileShardResult(aggProfileResults);
     }
 
-    public void testFromXContent() throws IOException {
-        AggregationProfileShardResult profileResult = createTestItem(2);
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
+    @Override
+    protected AggregationProfileShardResult createTestInstance() {
+        return createTestItem(2);
+    }
 
-        AggregationProfileShardResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
+    @Override
+    protected AggregationProfileShardResult doParseInstance(XContentParser parser) throws IOException {
         XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
         XContentParserUtils.ensureFieldName(parser, parser.nextToken(), AggregationProfileShardResult.AGGREGATIONS);
         XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser);
-            parsed = AggregationProfileShardResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
-            assertNull(parser.nextToken());
-        }
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
+        AggregationProfileShardResult result = AggregationProfileShardResult.fromXContent(parser);
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_ARRAY, parser.currentToken(), parser);
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
+        return result;
+    }
+
+    @Override
+    protected Reader<AggregationProfileShardResult> instanceReader() {
+        return AggregationProfileShardResult::new;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER;
    }
 
     public void testToXContent() throws IOException {
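Note: this conversion, repeated below for CollectorResultTests and QueryProfileShardResultTests, replaces each hand-rolled testFromXContent with the shared AbstractSerializingTestCase harness. Per random instance the base class runs roughly the following (a condensed sketch, not the literal harness; xContentType and humanReadable stand in for the randomized values it picks, and it additionally injects random fields filtered by getRandomFieldsExcludeFilter()):

    T instance = createTestInstance();
    assertEqualInstances(instance, copyInstance(instance)); // wire: writeTo(...) then instanceReader()
    BytesReference original = toShuffledXContent(instance, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
    try (XContentParser parser = createParser(xContentType.xContent(), original)) {
        T parsed = doParseInstance(parser); // subclass strips any wrapper, then calls fromXContent
        assertToXContentEquivalent(original, toXContent(parsed, xContentType, humanReadable), xContentType);
    }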
@@ -9,26 +9,21 @@
 package org.elasticsearch.search.profile.query;
 
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
 import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
-import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
-public class CollectorResultTests extends ESTestCase {
-
+public class CollectorResultTests extends AbstractSerializingTestCase<CollectorResult> {
     public static CollectorResult createTestItem(int depth) {
         String name = randomAlphaOfLengthBetween(5, 10);
         String reason = randomAlphaOfLengthBetween(5, 10);
@@ -47,31 +42,22 @@ public class CollectorResultTests extends ESTestCase {
         return new CollectorResult(name, reason, time, children);
     }
 
-    public void testFromXContent() throws IOException {
-        doFromXContentTestWithRandomFields(false);
+    @Override
+    protected CollectorResult createTestInstance() {
+        return createTestItem(1);
     }
 
-    public void testFromXContentWithRandomFields() throws IOException {
-        doFromXContentTestWithRandomFields(true);
+    @Override
+    protected CollectorResult doParseInstance(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        CollectorResult result = CollectorResult.fromXContent(parser);
+        ensureExpectedToken(null, parser.nextToken(), parser);
+        return result;
     }
 
-    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
-        CollectorResult collectorResult = createTestItem(1);
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(collectorResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-        BytesReference mutated;
-        if (addRandomFields) {
-            mutated = insertRandomFields(xContentType, originalBytes, null, random());
-        } else {
-            mutated = originalBytes;
-        }
-        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
-            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            CollectorResult parsed = CollectorResult.fromXContent(parser);
-            assertNull(parser.nextToken());
-            assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
-        }
+    @Override
+    protected Reader<CollectorResult> instanceReader() {
+        return CollectorResult::new;
     }
 
     public void testToXContent() throws IOException {
@@ -8,24 +8,20 @@
 
 package org.elasticsearch.search.profile.query;
 
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentParserUtils;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.search.profile.ProfileResult;
 import org.elasticsearch.search.profile.ProfileResultTests;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.function.Predicate;
 
-import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
+import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
 
-public class QueryProfileShardResultTests extends ESTestCase {
-
+public class QueryProfileShardResultTests extends AbstractSerializingTestCase<QueryProfileShardResult> {
     public static QueryProfileShardResult createTestItem() {
         int size = randomIntBetween(0, 5);
         List<ProfileResult> queryProfileResults = new ArrayList<>(size);
@@ -40,20 +36,26 @@ public class QueryProfileShardResultTests extends ESTestCase {
         return new QueryProfileShardResult(queryProfileResults, rewriteTime, profileCollector);
     }
 
-    public void testFromXContent() throws IOException {
-        QueryProfileShardResult profileResult = createTestItem();
-        XContentType xContentType = randomFrom(XContentType.values());
-        boolean humanReadable = randomBoolean();
-        BytesReference originalBytes = toShuffledXContent(profileResult, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
-
-        QueryProfileShardResult parsed;
-        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
-            XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
-            parsed = QueryProfileShardResult.fromXContent(parser);
-            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
-            assertNull(parser.nextToken());
-        }
-        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
+    @Override
+    protected QueryProfileShardResult createTestInstance() {
+        return createTestItem();
     }
 
+    @Override
+    protected QueryProfileShardResult doParseInstance(XContentParser parser) throws IOException {
+        ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
+        QueryProfileShardResult result = QueryProfileShardResult.fromXContent(parser);
+        ensureExpectedToken(null, parser.nextToken(), parser);
+        return result;
+    }
+
+    @Override
+    protected Reader<QueryProfileShardResult> instanceReader() {
+        return QueryProfileShardResult::new;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return ProfileResultTests.RANDOM_FIELDS_EXCLUDE_FILTER;
+    }
 }
@@ -12,11 +12,12 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
-import org.hamcrest.Matchers;
 
 import java.io.IOException;
 import java.util.Collections;
 
+import static org.hamcrest.Matchers.equalTo;
+
 /**
  * Standard test case for testing wire serialization. If the class being tested
  * extends {@link Writeable} then prefer extending {@link AbstractWireSerializingTestCase}.
@@ -84,8 +85,8 @@ public abstract class AbstractWireTestCase<T> extends ESTestCase {
      */
     protected void assertEqualInstances(T expectedInstance, T newInstance) {
         assertNotSame(newInstance, expectedInstance);
-        assertThat(expectedInstance, Matchers.equalTo(newInstance));
-        assertEquals(expectedInstance.hashCode(), newInstance.hashCode());
+        assertThat(newInstance, equalTo(expectedInstance));
+        assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode()));
     }
 
     protected final T copyInstance(T instance) throws IOException {
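Note: the assertEqualInstances change is about failure messages, not behavior: hamcrest's assertThat takes the actual value first and the matcher second, so swapping the arguments makes a failing comparison print the expected and actual values the right way around:

    assertThat(newInstance, equalTo(expectedInstance));                       // "Expected: <expected> ... but: was <actual>"
    assertThat(newInstance.hashCode(), equalTo(expectedInstance.hashCode())); // same convention for the hash check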
@@ -14,6 +14,8 @@ import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.get.MultiGetResponse;
+import org.elasticsearch.action.search.ClosePointInTimeAction;
+import org.elasticsearch.action.search.ClosePointInTimeRequest;
 import org.elasticsearch.action.search.MultiSearchResponse;
 import org.elasticsearch.action.search.SearchRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -28,9 +30,9 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.query.FuzzyQueryBuilder;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -47,7 +49,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilders;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.builder.PointInTimeBuilder;
-import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult;
+import org.elasticsearch.search.profile.ProfileResult;
+import org.elasticsearch.search.profile.SearchProfileShardResult;
 import org.elasticsearch.search.profile.query.QueryProfileShardResult;
 import org.elasticsearch.search.sort.SortBuilders;
 import org.elasticsearch.search.sort.SortMode;
@@ -63,8 +66,6 @@ import org.elasticsearch.test.InternalSettingsPlugin;
 import org.elasticsearch.test.SecurityIntegTestCase;
 import org.elasticsearch.test.SecuritySettingsSourceField;
 import org.elasticsearch.xpack.core.XPackSettings;
-import org.elasticsearch.action.search.ClosePointInTimeAction;
-import org.elasticsearch.action.search.ClosePointInTimeRequest;
 import org.elasticsearch.xpack.security.LocalStateSecurity;
 import org.elasticsearch.xpack.spatial.SpatialPlugin;
 import org.elasticsearch.xpack.spatial.index.query.ShapeQueryBuilder;
@@ -76,6 +77,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static java.util.stream.Collectors.toList;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
@@ -1400,13 +1402,15 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
         assertNoFailures(response);
 
         assertThat(response.getProfileResults().size(), equalTo(1));
-        SearchProfileQueryPhaseResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]);
+        SearchProfileShardResult shardResult = response.getProfileResults().get(response.getProfileResults().keySet().toArray()[0]);
         assertThat(shardResult.getQueryProfileResults().size(), equalTo(1));
         QueryProfileShardResult queryProfileShardResult = shardResult.getQueryProfileResults().get(0);
         assertThat(queryProfileShardResult.getQueryResults().size(), equalTo(1));
         logger.info("queryProfileShardResult=" + Strings.toString(queryProfileShardResult));
-        // ProfileResult profileResult = queryProfileShardResult.getQueryResults().get(0);
-        // assertThat(profileResult.getLuceneDescription(), equalTo("(other_field:value)^0.8"));
+        assertThat(
+            queryProfileShardResult.getQueryResults().stream().map(ProfileResult::getLuceneDescription).sorted().collect(toList()),
+            equalTo(List.of("(other_field:value)^0.8"))
+        );
 
         final String[] indices =
             randomFrom(List.of(new String[] { "test" }, new String[] { "fls-index", "test" }, new String[] { "test", "fls-index" }));
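Note: with the profile responses now keyed to SearchProfileShardResult, the previously commented-out description assertion can come back; mapping the query results to their Lucene descriptions and sorting them before comparing keeps the check stable regardless of the order in which the profiler reports entries.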