mirror of https://github.com/elastic/elasticsearch.git, synced 2025-06-28 09:28:55 -04:00
Create some general test utilities (#127407)
Moving around and adding some test utilities.
parent 677ab3545b
commit 0700b24dd0

3 changed files with 21 additions and 15 deletions
@@ -12,8 +12,6 @@ import org.apache.http.entity.ContentType;
 import org.apache.http.nio.entity.NByteArrayEntity;
 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.util.SetOnce;
-import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
-import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.search.MultiSearchRequest;
@@ -46,7 +44,6 @@ import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CancellationException;
@@ -94,7 +91,7 @@ public class SearchRestCancellationIT extends HttpSmokeTestCase {
     }

     void verifyCancellationDuringQueryPhase(String searchAction, Request searchRequest) throws Exception {
-        Map<String, String> nodeIdToName = readNodesInfo();
+        Map<String, String> nodeIdToName = nodeIdsToNames();

         List<ScriptedBlockPlugin> plugins = initBlockFactory();
         indexTestData();
@@ -137,7 +134,7 @@ public class SearchRestCancellationIT extends HttpSmokeTestCase {
     }

     void verifyCancellationDuringFetchPhase(String searchAction, Request searchRequest) throws Exception {
-        Map<String, String> nodeIdToName = readNodesInfo();
+        Map<String, String> nodeIdToName = nodeIdsToNames();

         List<ScriptedBlockPlugin> plugins = initBlockFactory();
         indexTestData();
@@ -153,16 +150,6 @@
         expectThrows(CancellationException.class, future::actionGet);
     }

-    private static Map<String, String> readNodesInfo() {
-        Map<String, String> nodeIdToName = new HashMap<>();
-        NodesInfoResponse nodesInfoResponse = clusterAdmin().prepareNodesInfo().get();
-        assertFalse(nodesInfoResponse.hasFailures());
-        for (NodeInfo node : nodesInfoResponse.getNodes()) {
-            nodeIdToName.put(node.getNode().getId(), node.getNode().getName());
-        }
-        return nodeIdToName;
-    }
-
     private static void ensureSearchTaskIsCancelled(String transportAction, Function<String, String> nodeIdToName) throws Exception {
        SetOnce<TaskInfo> searchTask = new SetOnce<>();
        ListTasksResponse listTasksResponse = clusterAdmin().prepareListTasks().get();
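The call sites above now obtain the node-id-to-name map from the shared ESIntegTestCase helper instead of the removed private readNodesInfo(). As a rough illustration only (the class name and assertions below are hypothetical and not part of this commit), a test built on ESIntegTestCase could consume the new utility the same way, including passing it where a Function<String, String> lookup is expected:

import java.util.Map;
import java.util.function.Function;

import org.elasticsearch.test.ESIntegTestCase;

public class NodeNameLookupIT extends ESIntegTestCase {

    public void testNodeIdsResolveToNodeNames() {
        // Shared utility introduced by this commit: node id -> node name.
        Map<String, String> nodeIdToName = nodeIdsToNames();

        // The map can stand in for a Function<String, String> lookup via a
        // method reference, as the cancellation helpers above expect.
        Function<String, String> lookup = nodeIdToName::get;

        for (String nodeName : internalCluster().getNodeNames()) {
            assertEquals(nodeName, lookup.apply(getNodeId(nodeName)));
        }
    }
}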
@@ -193,6 +193,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
@@ -1207,6 +1208,19 @@ public abstract class ESIntegTestCase extends ESTestCase {
         return internalCluster().getInstance(ClusterService.class, nodeName).localNode().getId();
     }

+    /**
+     * @return A map of the cluster node Ids to their node names.
+     */
+    public static Map<String, String> nodeIdsToNames() {
+        var names = internalCluster().getNodeNames();
+        Map<String, String> nodeIdsToNames = new HashMap<>();
+        for (var name : names) {
+            nodeIdsToNames.put(getNodeId(name), name);
+        }
+        return nodeIdsToNames;
+
+    }
+
     /**
      * Waits until at least a give number of document is visible for searchers
      *
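A minimal sketch of how the new ESIntegTestCase.nodeIdsToNames() utility might be exercised. The test class below is hypothetical; it assumes only the methods visible in this diff (nodeIdsToNames(), internalCluster().getNodeNames()) plus the standard test assertions:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;

import org.elasticsearch.test.ESIntegTestCase;

public class NodeIdsToNamesIT extends ESIntegTestCase {

    public void testMapCoversEveryClusterNode() {
        Map<String, String> nodeIdToName = nodeIdsToNames();

        // One entry per node id, and the values are exactly the node names
        // reported by the internal cluster.
        assertEquals(internalCluster().getNodeNames().length, nodeIdToName.size());
        assertEquals(
            new HashSet<>(Arrays.asList(internalCluster().getNodeNames())),
            new HashSet<>(nodeIdToName.values())
        );
    }
}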
@@ -2314,6 +2314,11 @@ public final class InternalTestCluster extends TestCluster {
         return filterNodes(nodes, NodeAndClient::isMasterEligible).size();
     }

+    public Set<String> masterEligibleNodeNames() {
+        var masterEligibleNodes = filterNodes(nodes, NodeAndClient::isMasterEligible);
+        return masterEligibleNodes.stream().map(nodeAndClient -> nodeAndClient.name).collect(Collectors.toSet());
+    }
+
     public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
         assert activeDisruptionScheme == null
             : "there is already and active disruption [" + activeDisruptionScheme + "]. call clearDisruptionScheme first";
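And a similar hypothetical sketch for the new InternalTestCluster.masterEligibleNodeNames() accessor, assuming only that the test extends ESIntegTestCase and that InternalTestCluster.getMasterName() behaves as it does today; the class name and assertions are illustrative, not part of the commit:

import java.util.Set;

import org.elasticsearch.test.ESIntegTestCase;

public class MasterEligibleNodeNamesIT extends ESIntegTestCase {

    public void testElectedMasterIsMasterEligible() {
        // New utility added in this commit: names of all master-eligible nodes.
        Set<String> masterEligible = internalCluster().masterEligibleNodeNames();

        assertFalse(masterEligible.isEmpty());
        // The elected master should always be one of the master-eligible nodes.
        assertTrue(masterEligible.contains(internalCluster().getMasterName()));
    }
}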