Mirror of https://github.com/elastic/elasticsearch.git
Use Java 14 switch expressions (#82178)
JEP 361 (https://openjdk.java.net/jeps/361) added support for switch expressions, which can be much more terse and less error-prone than switch statements. Another useful feature of switch expressions is exhaustiveness: we can make sure that an enum switch expression covers all the cases at compile time.
Parent: 35a79bc7d4
Commit: 0699c9351f
720 changed files with 7813 additions and 13306 deletions
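The conversion is the same throughout the diff below: a switch statement that assigns or returns in every branch becomes a single switch expression whose arrow arms yield a value directly. As a standalone illustration (a minimal sketch that is not taken from the commit; the Direction enum and the describe methods are invented for the example), the old and new forms compare like this, and the enum form with no default arm is what gives the compile-time exhaustiveness check mentioned above:

public class SwitchExpressionDemo {
    enum Direction { NORTH, SOUTH, EAST, WEST }

    // Old style: mutate a local variable in each branch and remember the break.
    static String describeOld(Direction d) {
        String label;
        switch (d) {
            case NORTH:
                label = "up";
                break;
            case SOUTH:
                label = "down";
                break;
            default:
                label = "sideways";
                break;
        }
        return label;
    }

    // Java 14 style: the switch is an expression. With every enum constant
    // covered there is no default arm, so adding a new constant later turns
    // into a compile error here instead of a silent fall-through.
    static String describeNew(Direction d) {
        return switch (d) {
            case NORTH -> "up";
            case SOUTH -> "down";
            case EAST, WEST -> "sideways";
        };
    }

    public static void main(String[] args) {
        System.out.println(describeOld(Direction.NORTH) + " / " + describeNew(Direction.WEST));
    }
}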
@@ -89,16 +89,11 @@ public class RoundingBenchmark {
             roundingBuilder = Rounding.builder(TimeValue.parseTimeValue(interval, "interval"));
         }
         Rounding rounding = roundingBuilder.timeZone(ZoneId.of(zone)).build();
-        switch (rounder) {
-            case "java time":
-                rounderBuilder = rounding::prepareJavaTime;
-                break;
-            case "es":
-                rounderBuilder = () -> rounding.prepare(min, max);
-                break;
-            default:
-                throw new IllegalArgumentException("Expectd rounder to be [java time] or [es]");
-        }
+        rounderBuilder = switch (rounder) {
+            case "java time" -> rounding::prepareJavaTime;
+            case "es" -> () -> rounding.prepare(min, max);
+            default -> throw new IllegalArgumentException("Expected rounder to be [java time] or [es]");
+        };
     }

     @Benchmark
@@ -101,28 +101,19 @@ public class ScriptScoreBenchmark {

     @Setup
     public void setupScript() {
-        switch (script) {
-            case "expression":
-                factory = scriptModule.engines.get("expression").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
-                break;
-            case "metal":
-                factory = bareMetalScript();
-                break;
-            case "painless_cast":
-                factory = scriptModule.engines.get("painless")
+        factory = switch (script) {
+            case "expression" -> scriptModule.engines.get("expression").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
+            case "metal" -> bareMetalScript();
+            case "painless_cast" -> scriptModule.engines.get("painless")
                 .compile(
                     "test",
                     "((org.elasticsearch.index.fielddata.ScriptDocValues.Longs)doc['n']).value",
                     ScoreScript.CONTEXT,
                     Map.of()
                 );
-                break;
-            case "painless_def":
-                factory = scriptModule.engines.get("painless").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
-                break;
-            default:
-                throw new IllegalArgumentException("Don't know how to implement script [" + script + "]");
-        }
+            case "painless_def" -> scriptModule.engines.get("painless").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
+            default -> throw new IllegalArgumentException("Don't know how to implement script [" + script + "]");
+        };
     }

     @Setup
@@ -107,20 +107,12 @@ public class AggConstructionContentionBenchmark {

     @Setup
     public void setup() {
-        switch (breaker) {
-            case "real":
-                breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
-                break;
-            case "preallocate":
-                preallocateBreaker = true;
-                breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
-                break;
-            case "noop":
-                breakerService = new NoneCircuitBreakerService();
-                break;
-            default:
-                throw new UnsupportedOperationException();
-        }
+        breakerService = switch (breaker) {
+            case "real", "preallocate" -> new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
+            case "noop" -> new NoneCircuitBreakerService();
+            default -> throw new UnsupportedOperationException();
+        };
+        preallocateBreaker = breaker.equals("preallocate");
         bigArrays = new BigArrays(recycler, breakerService, "request");
     }

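One detail worth noting in the hunk above: an arrow arm may carry several comma-separated labels, which is what lets the old fall-through pair of "real" and "preallocate" branches collapse into a single arm, with the side effect that differed between them (preallocateBreaker) moved out after the switch. A minimal sketch of the multi-label form, with invented names that are not from the benchmark:

// The comma-separated label list replaces the old "case A: case B:" fall-through chain.
static int weekendSurcharge(String day) {
    return switch (day) {
        case "saturday", "sunday" -> 5;
        default -> 0;
    };
}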
@@ -52,22 +52,13 @@ public class FetchSourcePhaseBenchmark {

     @Setup
     public void setup() throws IOException {
-        switch (source) {
-            case "tiny":
-                sourceBytes = new BytesArray("{\"message\": \"short\"}");
-                break;
-            case "short":
-                sourceBytes = read300BytesExample();
-                break;
-            case "one_4k_field":
-                sourceBytes = buildBigExample("huge".repeat(1024));
-                break;
-            case "one_4m_field":
-                sourceBytes = buildBigExample("huge".repeat(1024 * 1024));
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown source [" + source + "]");
-        }
+        sourceBytes = switch (source) {
+            case "tiny" -> new BytesArray("{\"message\": \"short\"}");
+            case "short" -> read300BytesExample();
+            case "one_4k_field" -> buildBigExample("huge".repeat(1024));
+            case "one_4m_field" -> buildBigExample("huge".repeat(1024 * 1024));
+            default -> throw new IllegalArgumentException("Unknown source [" + source + "]");
+        };
         fetchContext = new FetchSourceContext(
             true,
             Strings.splitStringByCommaToArray(includes),
@@ -64,20 +64,12 @@ public class FilterContentBenchmark {

     @Setup
     public void setup() throws IOException {
-        String sourceFile;
-        switch (type) {
-            case "cluster_stats":
-                sourceFile = "monitor_cluster_stats.json";
-                break;
-            case "index_stats":
-                sourceFile = "monitor_index_stats.json";
-                break;
-            case "node_stats":
-                sourceFile = "monitor_node_stats.json";
-                break;
-            default:
-                throw new IllegalArgumentException("Unknown type [" + type + "]");
-        }
+        String sourceFile = switch (type) {
+            case "cluster_stats" -> "monitor_cluster_stats.json";
+            case "index_stats" -> "monitor_index_stats.json";
+            case "node_stats" -> "monitor_node_stats.json";
+            default -> throw new IllegalArgumentException("Unknown type [" + type + "]");
+        };
         source = readSource(sourceFile);
         filters = buildFilters();
         parserConfig = buildParseConfig();
@@ -87,17 +79,12 @@ public class FilterContentBenchmark {
         Map<String, Object> flattenMap = Maps.flatten(XContentHelper.convertToMap(source, true, XContentType.JSON).v2(), false, true);
         Set<String> keys = flattenMap.keySet();
         AtomicInteger count = new AtomicInteger();
-        switch (fieldCount) {
-            case "10_field":
-                return keys.stream().filter(key -> count.getAndIncrement() % 5 == 0).limit(10).collect(Collectors.toSet());
-            case "half_field":
-                return keys.stream().filter(key -> count.getAndIncrement() % 2 == 0).collect(Collectors.toSet());
-            case "all_field":
-                return new HashSet<>(keys);
-            case "wildcard_field":
-                return new HashSet<>(Arrays.asList("*stats"));
-            case "10_wildcard_field":
-                return Set.of(
+        return switch (fieldCount) {
+            case "10_field" -> keys.stream().filter(key -> count.getAndIncrement() % 5 == 0).limit(10).collect(Collectors.toSet());
+            case "half_field" -> keys.stream().filter(key -> count.getAndIncrement() % 2 == 0).collect(Collectors.toSet());
+            case "all_field" -> new HashSet<>(keys);
+            case "wildcard_field" -> new HashSet<>(Arrays.asList("*stats"));
+            case "10_wildcard_field" -> Set.of(
                 "*stats.nodes*",
                 "*stats.ind*",
                 "*sta*.shards",
@@ -109,9 +96,8 @@ public class FilterContentBenchmark {
                 inclusive ? "*stats.**.threads" : "*stats.*.threads",
                 "*source_node.t*"
             );
-            default:
-                throw new IllegalArgumentException("Unknown type [" + type + "]");
-        }
+            default -> throw new IllegalArgumentException("Unknown type [" + type + "]");
+        };
     }

     @Benchmark
@@ -44,18 +44,11 @@ public class SymbolicLinkPreservingTar extends Tar {

     @Override
     protected CopyAction createCopyAction() {
-        final ArchiveOutputStreamFactory compressor;
-        switch (getCompression()) {
-            case BZIP2:
-                compressor = Bzip2Archiver.getCompressor();
-                break;
-            case GZIP:
-                compressor = GzipArchiver.getCompressor();
-                break;
-            default:
-                compressor = new SimpleCompressor();
-                break;
-        }
+        final ArchiveOutputStreamFactory compressor = switch (getCompression()) {
+            case BZIP2 -> Bzip2Archiver.getCompressor();
+            case GZIP -> GzipArchiver.getCompressor();
+            default -> new SimpleCompressor();
+        };
         return new SymbolicLinkPreservingTarCopyAction(getArchiveFile(), compressor, isPreserveFileTimestamps());
     }

@@ -87,23 +87,17 @@ public class TransformLog4jConfigFilter extends FilterReader {
                 }

                 switch (keyParts[2]) {
-                    case "type":
+                    case "type" -> {
                         if (value.equals("RollingFile")) {
                             value = "Console";
                         }
                         line = key + " = " + value;
-                        break;
-
-                    case "fileName":
-                    case "filePattern":
-                    case "policies":
-                    case "strategy":
+                    }
+                    case "fileName", "filePattern", "policies", "strategy" -> {
                         // No longer applicable. Omit it.
                         skipNext = line.endsWith("\\");
                         continue;
-
-                    default:
-                        break;
+                    }
                 }
             } else if (line.startsWith("rootLogger.appenderRef")) {
                 String[] parts = line.split("\\s*=\\s*");
@@ -21,15 +21,11 @@ public enum Architecture {

     public static Architecture current() {
         final String architecture = System.getProperty("os.arch", "");
-        switch (architecture) {
-            case "amd64":
-            case "x86_64":
-                return X64;
-            case "aarch64":
-                return AARCH64;
-            default:
-                throw new IllegalArgumentException("can not determine architecture from [" + architecture + "]");
-        }
+        return switch (architecture) {
+            case "amd64", "x86_64" -> X64;
+            case "aarch64" -> AARCH64;
+            default -> throw new IllegalArgumentException("can not determine architecture from [" + architecture + "]");
+        };
     }

 }
@@ -36,16 +36,12 @@ public abstract class AbstractBenchmark<T extends Closeable> {
             System.exit(1);
         }
         switch (args[0]) {
-            case "search":
-                runSearchBenchmark(args);
-                break;
-            case "bulk":
-                runBulkIndexBenchmark(args);
-                break;
-            default:
+            case "search" -> runSearchBenchmark(args);
+            case "bulk" -> runBulkIndexBenchmark(args);
+            default -> {
                 System.err.println("Unknown benchmark type [" + args[0] + "]");
                 System.exit(1);
+            }
         }

     }
@@ -16,15 +16,14 @@ public class BenchmarkMain {
     @SuppressForbidden(reason = "system out is ok for a command line tool")
     public static void main(String[] args) throws Exception {
         String type = args[0];
-        AbstractBenchmark<?> benchmark = null;
-        switch (type) {
-            case "rest":
-                benchmark = new RestClientBenchmark();
-                break;
-            default:
+        AbstractBenchmark<?> benchmark = switch (type) {
+            case "rest" -> new RestClientBenchmark();
+            default -> {
                 System.err.println("Unknown client type [" + type + "]");
                 System.exit(1);
-        }
+                yield null;
+            }
+        };
         benchmark.run(Arrays.copyOfRange(args, 1, args.length));
     }
 }
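The BenchmarkMain hunk above also shows the block form of an arm: when a branch needs several statements, the arrow points at a braced block, and the block supplies the value of the switch expression with yield (here yield null after the error path). A minimal sketch of a block-bodied arm with yield, using invented names rather than anything from the commit:

// A block-bodied arm runs statements and then uses `yield` to produce
// the value of the enclosing switch expression.
static int parsePort(String arg) {
    return switch (arg) {
        case "default" -> 9200;
        default -> {
            int port = Integer.parseInt(arg);
            yield port;
        }
    };
}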
@@ -163,14 +163,11 @@ public final class FollowInfoResponse {
         }

         public static Status fromString(String value) {
-            switch (value) {
-                case "active":
-                    return Status.ACTIVE;
-                case "paused":
-                    return Status.PAUSED;
-                default:
-                    throw new IllegalArgumentException("unexpected status value [" + value + "]");
-            }
+            return switch (value) {
+                case "active" -> Status.ACTIVE;
+                case "paused" -> Status.PAUSED;
+                default -> throw new IllegalArgumentException("unexpected status value [" + value + "]");
+            };
         }
     }
 }
@@ -149,20 +149,11 @@ public class GetIndexResponse {
             parser.nextToken();
             if (parser.currentToken() == Token.START_OBJECT) {
                 switch (parser.currentName()) {
-                    case "aliases":
-                        indexAliases = parseAliases(parser);
-                        break;
-                    case "mappings":
-                        indexMappings = parseMappings(parser);
-                        break;
-                    case "settings":
-                        indexSettings = Settings.fromXContent(parser);
-                        break;
-                    case "defaults":
-                        indexDefaultSettings = Settings.fromXContent(parser);
-                        break;
-                    default:
-                        parser.skipChildren();
+                    case "aliases" -> indexAliases = parseAliases(parser);
+                    case "mappings" -> indexMappings = parseMappings(parser);
+                    case "settings" -> indexSettings = Settings.fromXContent(parser);
+                    case "defaults" -> indexDefaultSettings = Settings.fromXContent(parser);
+                    default -> parser.skipChildren();
                 }
             } else if (parser.currentToken() == Token.VALUE_STRING) {
                 if (parser.currentName().equals("data_stream")) {
@@ -27,15 +27,11 @@ public enum LicenseStatus {
     }

     public static LicenseStatus fromString(String value) {
-        switch (value) {
-            case "active":
-                return ACTIVE;
-            case "invalid":
-                return INVALID;
-            case "expired":
-                return EXPIRED;
-            default:
-                throw new IllegalArgumentException("unknown license status [" + value + "]");
-        }
+        return switch (value) {
+            case "active" -> ACTIVE;
+            case "invalid" -> INVALID;
+            case "expired" -> EXPIRED;
+            default -> throw new IllegalArgumentException("unknown license status [" + value + "]");
+        };
     }
 }
@@ -42,15 +42,11 @@ public enum LicensesStatus {
     }

     public static LicensesStatus fromString(String value) {
-        switch (value) {
-            case "valid":
-                return VALID;
-            case "invalid":
-                return INVALID;
-            case "expired":
-                return EXPIRED;
-            default:
-                throw new IllegalArgumentException("unknown licenses status [" + value + "]");
-        }
+        return switch (value) {
+            case "valid" -> VALID;
+            case "invalid" -> INVALID;
+            case "expired" -> EXPIRED;
+            default -> throw new IllegalArgumentException("unknown licenses status [" + value + "]");
+        };
     }
 }
@@ -164,25 +164,16 @@ public final class RoleMapperExpressionParser {
     }

     private Object parseFieldValue(XContentParser parser) throws IOException {
-        switch (parser.currentToken()) {
-            case VALUE_STRING:
-                return parser.text();
-
-            case VALUE_BOOLEAN:
-                return parser.booleanValue();
-
-            case VALUE_NUMBER:
-                return parser.longValue();
-
-            case VALUE_NULL:
-                return null;
-
-            default:
-                throw new ElasticsearchParseException(
+        return switch (parser.currentToken()) {
+            case VALUE_STRING -> parser.text();
+            case VALUE_BOOLEAN -> parser.booleanValue();
+            case VALUE_NUMBER -> parser.longValue();
+            case VALUE_NULL -> null;
+            default -> throw new ElasticsearchParseException(
                 "failed to parse rules expression. expected a field value but found [{}] instead",
                 parser.currentToken()
             );
-        }
+        };
     }

 }
@@ -75,24 +75,17 @@ public class GroupConfig implements ToXContentObject {
                 continue;
             }

-            SingleGroupSource groupSource = null;
-            switch (groupType) {
-                case "terms":
-                    groupSource = TermsGroupSource.fromXContent(parser);
-                    break;
-                case "histogram":
-                    groupSource = HistogramGroupSource.fromXContent(parser);
-                    break;
-                case "date_histogram":
-                    groupSource = DateHistogramGroupSource.fromXContent(parser);
-                    break;
-                case "geotile_grid":
-                    groupSource = GeoTileGroupSource.fromXContent(parser);
-                    break;
-                default:
+            SingleGroupSource groupSource = switch (groupType) {
+                case "terms" -> TermsGroupSource.fromXContent(parser);
+                case "histogram" -> HistogramGroupSource.fromXContent(parser);
+                case "date_histogram" -> DateHistogramGroupSource.fromXContent(parser);
+                case "geotile_grid" -> GeoTileGroupSource.fromXContent(parser);
+                default -> {
                     // not a valid group source. Consume up to the dest field end object
                     consumeUntilEndObject(parser, 2);
-            }
+                    yield null;
+                }
+            };

             if (groupSource != null) {
                 groups.put(destinationFieldName, groupSource);
@@ -69,29 +69,28 @@ public class ClusterRequestConvertersTests extends ESTestCase {
         String timeout = ESTestCase.randomTimeValue();
         String masterTimeout = ESTestCase.randomTimeValue();
         switch (timeoutType) {
-            case "timeout":
+            case "timeout" -> {
                 healthRequest.timeout(timeout);
                 expectedParams.put("timeout", timeout);
                 // If Master Timeout wasn't set it uses the same value as Timeout
                 expectedParams.put("master_timeout", timeout);
-                break;
-            case "masterTimeout":
+            }
+            case "masterTimeout" -> {
                 expectedParams.put("timeout", "30s");
                 healthRequest.masterNodeTimeout(masterTimeout);
                 expectedParams.put("master_timeout", masterTimeout);
-                break;
-            case "both":
+            }
+            case "both" -> {
                 healthRequest.timeout(timeout);
                 expectedParams.put("timeout", timeout);
                 healthRequest.masterNodeTimeout(timeout);
                 expectedParams.put("master_timeout", timeout);
-                break;
-            case "none":
+            }
+            case "none" -> {
                 expectedParams.put("timeout", "30s");
                 expectedParams.put("master_timeout", "30s");
-                break;
-            default:
-                throw new UnsupportedOperationException();
+            }
+            default -> throw new UnsupportedOperationException();
         }
         RequestConvertersTests.setRandomWaitForActiveShards(healthRequest::waitForActiveShards, ActiveShardCount.NONE, expectedParams);
         if (ESTestCase.randomBoolean()) {
@@ -356,20 +356,11 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
         List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
         for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
             switch (entry.getKey()) {
-                case "agg":
-                    assertThat(entry.getValue(), equalTo("date_histogram"));
-                    break;
-                case "delay":
-                    assertThat(entry.getValue(), equalTo("foo"));
-                    break;
-                case "calendar_interval":
-                    assertThat(entry.getValue(), equalTo("1d"));
-                    break;
-                case "time_zone":
-                    assertThat(entry.getValue(), equalTo("UTC"));
-                    break;
-                default:
-                    fail("Unknown field cap: [" + entry.getKey() + "]");
+                case "agg" -> assertThat(entry.getValue(), equalTo("date_histogram"));
+                case "delay" -> assertThat(entry.getValue(), equalTo("foo"));
+                case "calendar_interval" -> assertThat(entry.getValue(), equalTo("1d"));
+                case "time_zone" -> assertThat(entry.getValue(), equalTo("UTC"));
+                default -> fail("Unknown field cap: [" + entry.getKey() + "]");
             }
         }

@@ -471,20 +462,11 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
         List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
         for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
             switch (entry.getKey()) {
-                case "agg":
-                    assertThat(entry.getValue(), equalTo("date_histogram"));
-                    break;
-                case "delay":
-                    assertThat(entry.getValue(), equalTo("foo"));
-                    break;
-                case "calendar_interval":
-                    assertThat(entry.getValue(), equalTo("1d"));
-                    break;
-                case "time_zone":
-                    assertThat(entry.getValue(), equalTo("UTC"));
-                    break;
-                default:
-                    fail("Unknown field cap: [" + entry.getKey() + "]");
+                case "agg" -> assertThat(entry.getValue(), equalTo("date_histogram"));
+                case "delay" -> assertThat(entry.getValue(), equalTo("foo"));
+                case "calendar_interval" -> assertThat(entry.getValue(), equalTo("1d"));
+                case "time_zone" -> assertThat(entry.getValue(), equalTo("UTC"));
+                default -> fail("Unknown field cap: [" + entry.getKey() + "]");
             }
         }

@@ -27,19 +27,16 @@ public class XPackRequestConvertersTests extends ESTestCase {
         }
         int option = ESTestCase.between(0, 2);
         switch (option) {
-            case 0:
-                infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
-                break;
-            case 1:
+            case 0 -> infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
+            case 1 -> {
                 infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES));
                 expectedParams.put("categories", "features");
-                break;
-            case 2:
+            }
+            case 2 -> {
                 infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD));
                 expectedParams.put("categories", "build,features");
-                break;
-            default:
-                throw new IllegalArgumentException("invalid option [" + option + "]");
+            }
+            default -> throw new IllegalArgumentException("invalid option [" + option + "]");
         }

         Request request = XPackRequestConverters.info(infoRequest);
@@ -116,17 +116,19 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<

     private static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xType) {
         switch (randomIntBetween(0, 2)) {
-            case 0:
+            case 0 -> {
                 String fieldName = randomAlphaOfLengthBetween(3, 10);
                 Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xType);
                 DocumentField input = new DocumentField(fieldName, tuple.v1());
                 DocumentField expected = new DocumentField(fieldName, tuple.v2());
                 return Tuple.tuple(input, expected);
-            case 1:
+            }
+            case 1 -> {
                 List<Object> listValues = randomList(1, 5, () -> randomList(1, 5, ESTestCase::randomInt));
                 DocumentField listField = new DocumentField(randomAlphaOfLength(5), listValues);
                 return Tuple.tuple(listField, listField);
-            case 2:
+            }
+            case 2 -> {
                 List<Object> objectValues = randomList(
                     1,
                     5,
@@ -141,8 +143,8 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<
                 );
                 DocumentField objectField = new DocumentField(randomAlphaOfLength(5), objectValues);
                 return Tuple.tuple(objectField, objectField);
-            default:
-                throw new IllegalStateException();
+            }
+            default -> throw new IllegalStateException();
         }
     }

@@ -212,14 +214,11 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<

     public static org.elasticsearch.xpack.eql.action.EqlSearchResponse createRandomInstance(TotalHits totalHits, XContentType xType) {
         int type = between(0, 1);
-        switch (type) {
-            case 0:
-                return createRandomEventsResponse(totalHits, xType);
-            case 1:
-                return createRandomSequencesResponse(totalHits, xType);
-            default:
-                return null;
-        }
+        return switch (type) {
+            case 0 -> createRandomEventsResponse(totalHits, xType);
+            case 1 -> createRandomSequencesResponse(totalHits, xType);
+            default -> null;
+        };
     }

     @Override
@@ -49,15 +49,12 @@ public class ExplainLifecycleRequestTests extends ESTestCase {
         String[] indices = instance.getIndices();
         IndicesOptions indicesOptions = instance.indicesOptions();
         switch (between(0, 1)) {
-            case 0:
-                indices = randomValueOtherThanMany(
+            case 0 -> indices = randomValueOtherThanMany(
                 i -> Arrays.equals(i, instance.getIndices()),
                 () -> generateRandomStringArray(20, 10, false, false)
             );
-                break;
-            case 1:
-                indicesOptions = randomValueOtherThan(
-                indicesOptions,
+            case 1 -> indicesOptions = randomValueOtherThan(
+                instance.indicesOptions(),
                 () -> IndicesOptions.fromOptions(
                     randomBoolean(),
                     randomBoolean(),
@@ -69,9 +66,7 @@ public class ExplainLifecycleRequestTests extends ESTestCase {
                     randomBoolean()
                 )
             );
-                break;
-            default:
-                throw new AssertionError("Illegal randomisation branch");
+            default -> throw new AssertionError("Illegal randomisation branch");
         }
         ExplainLifecycleRequest newRequest = new ExplainLifecycleRequest(indices);
         newRequest.indicesOptions(indicesOptions);
@@ -226,60 +226,37 @@ public class LifecyclePolicyTests extends AbstractXContentTestCase<LifecyclePoli
         List<String> phaseNames = Arrays.asList("hot", "warm", "cold", "delete");
         Map<String, Phase> phases = new HashMap<>(phaseNames.size());
         Function<String, Set<String>> validActions = (phase) -> {
-            switch (phase) {
-                case "hot":
-                    return VALID_HOT_ACTIONS;
-                case "warm":
-                    return VALID_WARM_ACTIONS;
-                case "cold":
-                    return VALID_COLD_ACTIONS;
-                case "delete":
-                    return VALID_DELETE_ACTIONS;
-                default:
-                    throw new IllegalArgumentException("invalid phase [" + phase + "]");
-            }
+            return switch (phase) {
+                case "hot" -> VALID_HOT_ACTIONS;
+                case "warm" -> VALID_WARM_ACTIONS;
+                case "cold" -> VALID_COLD_ACTIONS;
+                case "delete" -> VALID_DELETE_ACTIONS;
+                default -> throw new IllegalArgumentException("invalid phase [" + phase + "]");
+            };
         };
         Function<String, Boolean> allowEmptyActions = (phase) -> {
-            switch (phase) {
-                case "hot":
-                case "warm":
-                case "cold":
-                    return true;
-                case "delete":
-                    return false;
-                default:
-                    throw new IllegalArgumentException("invalid phase [" + phase + "]");
-            }
+            return switch (phase) {
+                case "hot", "warm", "cold" -> true;
+                case "delete" -> false;
+                default -> throw new IllegalArgumentException("invalid phase [" + phase + "]");
+            };
         };
         Function<String, LifecycleAction> randomAction = (action) -> {
-            switch (action) {
-                case AllocateAction.NAME:
-                    return AllocateActionTests.randomInstance();
-                case DeleteAction.NAME:
-                    return new DeleteAction();
-                case ForceMergeAction.NAME:
-                    return ForceMergeActionTests.randomInstance();
-                case ReadOnlyAction.NAME:
-                    return new ReadOnlyAction();
-                case RolloverAction.NAME:
-                    return RolloverActionTests.randomInstance();
-                case ShrinkAction.NAME:
-                    return ShrinkActionTests.randomInstance();
-                case FreezeAction.NAME:
-                    return new FreezeAction();
-                case WaitForSnapshotAction.NAME:
-                    return WaitForSnapshotActionTests.randomInstance();
-                case SetPriorityAction.NAME:
-                    return SetPriorityActionTests.randomInstance();
-                case UnfollowAction.NAME:
-                    return new UnfollowAction();
-                case SearchableSnapshotAction.NAME:
-                    return new SearchableSnapshotAction("repo", randomBoolean());
-                case MigrateAction.NAME:
-                    return new MigrateAction(randomBoolean());
-                default:
-                    throw new IllegalArgumentException("invalid action [" + action + "]");
-            }
+            return switch (action) {
+                case AllocateAction.NAME -> AllocateActionTests.randomInstance();
+                case DeleteAction.NAME -> new DeleteAction();
+                case ForceMergeAction.NAME -> ForceMergeActionTests.randomInstance();
+                case ReadOnlyAction.NAME -> new ReadOnlyAction();
+                case RolloverAction.NAME -> RolloverActionTests.randomInstance();
+                case ShrinkAction.NAME -> ShrinkActionTests.randomInstance();
+                case FreezeAction.NAME -> new FreezeAction();
+                case WaitForSnapshotAction.NAME -> WaitForSnapshotActionTests.randomInstance();
+                case SetPriorityAction.NAME -> SetPriorityActionTests.randomInstance();
+                case UnfollowAction.NAME -> new UnfollowAction();
+                case SearchableSnapshotAction.NAME -> new SearchableSnapshotAction("repo", randomBoolean());
+                case MigrateAction.NAME -> new MigrateAction(randomBoolean());
+                default -> throw new IllegalArgumentException("invalid action [" + action + "]");
+            };
         };
         TimeValue prev = null;
         boolean searchableSnapshotSeen = false;
@@ -318,33 +295,20 @@ public class LifecyclePolicyTests extends AbstractXContentTestCase<LifecyclePoli
     }

     private LifecycleAction getTestAction(String actionName) {
-        switch (actionName) {
-            case AllocateAction.NAME:
-                return AllocateActionTests.randomInstance();
-            case DeleteAction.NAME:
-                return new DeleteAction();
-            case ForceMergeAction.NAME:
-                return ForceMergeActionTests.randomInstance();
-            case ReadOnlyAction.NAME:
-                return new ReadOnlyAction();
-            case RolloverAction.NAME:
-                return RolloverActionTests.randomInstance();
-            case ShrinkAction.NAME:
-                return ShrinkActionTests.randomInstance();
-            case FreezeAction.NAME:
-                return new FreezeAction();
-            case WaitForSnapshotAction.NAME:
-                return WaitForSnapshotActionTests.randomInstance();
-            case SetPriorityAction.NAME:
-                return SetPriorityActionTests.randomInstance();
-            case SearchableSnapshotAction.NAME:
-                return SearchableSnapshotActionTests.randomInstance();
-            case UnfollowAction.NAME:
-                return new UnfollowAction();
-            case MigrateAction.NAME:
-                return new MigrateAction(randomBoolean());
-            default:
-                throw new IllegalArgumentException("unsupported phase action [" + actionName + "]");
-        }
+        return switch (actionName) {
+            case AllocateAction.NAME -> AllocateActionTests.randomInstance();
+            case DeleteAction.NAME -> new DeleteAction();
+            case ForceMergeAction.NAME -> ForceMergeActionTests.randomInstance();
+            case ReadOnlyAction.NAME -> new ReadOnlyAction();
+            case RolloverAction.NAME -> RolloverActionTests.randomInstance();
+            case ShrinkAction.NAME -> ShrinkActionTests.randomInstance();
+            case FreezeAction.NAME -> new FreezeAction();
+            case WaitForSnapshotAction.NAME -> WaitForSnapshotActionTests.randomInstance();
+            case SetPriorityAction.NAME -> SetPriorityActionTests.randomInstance();
+            case SearchableSnapshotAction.NAME -> SearchableSnapshotActionTests.randomInstance();
+            case UnfollowAction.NAME -> new UnfollowAction();
+            case MigrateAction.NAME -> new MigrateAction(randomBoolean());
+            default -> throw new IllegalArgumentException("unsupported phase action [" + actionName + "]");
+        };
     }
 }
@@ -27,16 +27,20 @@ import org.elasticsearch.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.function.Predicate;

 public class EvaluateDataFrameResponseTests extends AbstractXContentTestCase<EvaluateDataFrameResponse> {

+    private enum Evaluation {
+        OUTLIER_DETECTION,
+        CLASSIFICATION,
+        REGRESSION
+    }
+
     public static EvaluateDataFrameResponse randomResponse() {
         String evaluationName = randomFrom(OutlierDetection.NAME, Classification.NAME, Regression.NAME);
-        List<EvaluationMetric.Result> metrics;
-        switch (evaluationName) {
-            case OutlierDetection.NAME:
-                metrics = randomSubsetOf(
+        List<EvaluationMetric.Result> metrics = switch (Evaluation.valueOf(evaluationName.toUpperCase(Locale.ROOT))) {
+            case OUTLIER_DETECTION -> randomSubsetOf(
                 Arrays.asList(
                     AucRocResultTests.randomResult(),
                     PrecisionMetricResultTests.randomResult(),
@@ -44,14 +48,10 @@ public class EvaluateDataFrameResponseTests extends AbstractXContentTestCase<Eva
                     ConfusionMatrixMetricResultTests.randomResult()
                 )
             );
-                break;
-            case Regression.NAME:
-                metrics = randomSubsetOf(
+            case REGRESSION -> randomSubsetOf(
                 Arrays.asList(MeanSquaredErrorMetricResultTests.randomResult(), RSquaredMetricResultTests.randomResult())
             );
-                break;
-            case Classification.NAME:
-                metrics = randomSubsetOf(
+            case CLASSIFICATION -> randomSubsetOf(
                 Arrays.asList(
                     AucRocResultTests.randomResult(),
                     AccuracyMetricResultTests.randomResult(),
@@ -60,10 +60,7 @@ public class EvaluateDataFrameResponseTests extends AbstractXContentTestCase<Eva
                     MulticlassConfusionMatrixMetricResultTests.randomResult()
                 )
             );
-                break;
-            default:
-                throw new AssertionError("Please add missing \"case\" variant to the \"switch\" statement");
-        }
+        };
         return new EvaluateDataFrameResponse(evaluationName, metrics);
     }

@@ -82,18 +82,14 @@ public class DateHistogramGroupConfigTests extends AbstractXContentTestCase<Date
         final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null;
         final String timezone = randomBoolean() ? randomZone().toString() : null;
         int i = randomIntBetween(0, 2);
-        final DateHistogramInterval interval;
-        switch (i) {
-            case 0:
-                interval = new DateHistogramInterval(randomPositiveTimeValue());
-                return new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone);
-            case 1:
-                interval = new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"));
-                return new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone);
-            default:
-                interval = new DateHistogramInterval(randomPositiveTimeValue());
-                return new DateHistogramGroupConfig(field, interval, delay, timezone);
-        }
+        final DateHistogramInterval interval = switch (i) {
+            case 1 -> new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"));
+            default -> new DateHistogramInterval(randomPositiveTimeValue());
+        };
+        return switch (i) {
+            case 0 -> new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone);
+            case 1 -> new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone);
+            default -> new DateHistogramGroupConfig(field, interval, delay, timezone);
+        };
     }
 }
@@ -48,21 +48,18 @@ public class ClearServiceAccountTokenCacheRequestTests extends ESTestCase {
     }

     private ClearServiceAccountTokenCacheRequest mutateInstance(ClearServiceAccountTokenCacheRequest request) {
-        switch (randomIntBetween(0, 2)) {
-            case 0:
-                return new ClearServiceAccountTokenCacheRequest(
+        return switch (randomIntBetween(0, 2)) {
+            case 0 -> new ClearServiceAccountTokenCacheRequest(
                 randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getServiceName(),
                 request.getTokenNames()
             );
-            case 1:
-                return new ClearServiceAccountTokenCacheRequest(
+            case 1 -> new ClearServiceAccountTokenCacheRequest(
                 request.getNamespace(),
                 randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getTokenNames()
             );
-            default:
-                return new ClearServiceAccountTokenCacheRequest(
+            default -> new ClearServiceAccountTokenCacheRequest(
                 request.getNamespace(),
                 request.getServiceName(),
                 randomValueOtherThanMany(
@@ -70,6 +67,6 @@ public class ClearServiceAccountTokenCacheRequestTests extends ESTestCase {
                     () -> randomArray(0, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))
                 )
             );
-        }
+        };
     }
 }
@@ -86,27 +86,22 @@ public class CreateApiKeyResponseTests extends ESTestCase {
     }

     private static CreateApiKeyResponse mutateTestItem(CreateApiKeyResponse original) {
-        switch (randomIntBetween(0, 3)) {
-            case 0:
-                return new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
-            case 1:
-                return new CreateApiKeyResponse(
+        return switch (randomIntBetween(0, 3)) {
+            case 0 -> new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
+            case 1 -> new CreateApiKeyResponse(
                 original.getName(),
                 randomAlphaOfLengthBetween(4, 8),
                 original.getKey(),
                 original.getExpiration()
             );
-            case 2:
-                return new CreateApiKeyResponse(
+            case 2 -> new CreateApiKeyResponse(
                 original.getName(),
                 original.getId(),
                 UUIDs.randomBase64UUIDSecureString(),
                 original.getExpiration()
             );
-            case 3:
-                return new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), Instant.ofEpochMilli(150000));
-            default:
-                return new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
-        }
+            case 3 -> new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), Instant.ofEpochMilli(150000));
+            default -> new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
+        };
     }
 }
@@ -66,35 +66,31 @@ public class CreateServiceAccountTokenRequestTests extends ESTestCase {
     }

     private CreateServiceAccountTokenRequest mutateInstance(CreateServiceAccountTokenRequest request) {
-        switch (randomIntBetween(0, 3)) {
-            case 0:
-                return new CreateServiceAccountTokenRequest(
+        return switch (randomIntBetween(0, 3)) {
+            case 0 -> new CreateServiceAccountTokenRequest(
                 randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getServiceName(),
                 request.getTokenName(),
                 request.getRefreshPolicy()
             );
-            case 1:
-                return new CreateServiceAccountTokenRequest(
+            case 1 -> new CreateServiceAccountTokenRequest(
                 request.getNamespace(),
                 randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getTokenName(),
                 request.getRefreshPolicy()
             );
-            case 2:
-                return new CreateServiceAccountTokenRequest(
+            case 2 -> new CreateServiceAccountTokenRequest(
                 request.getNamespace(),
                 request.getServiceName(),
                 randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getRefreshPolicy()
             );
-            default:
-                return new CreateServiceAccountTokenRequest(
+            default -> new CreateServiceAccountTokenRequest(
                 request.getNamespace(),
                 request.getServiceName(),
                 request.getTokenName(),
                 randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
             );
-        }
+        };
     }
 }
@@ -65,36 +65,32 @@ public class DeleteServiceAccountTokenRequestTests extends ESTestCase {
     }

     private DeleteServiceAccountTokenRequest mutateInstance(DeleteServiceAccountTokenRequest request) {
-        switch (randomIntBetween(0, 3)) {
-            case 0:
-                return new DeleteServiceAccountTokenRequest(
+        return switch (randomIntBetween(0, 3)) {
+            case 0 -> new DeleteServiceAccountTokenRequest(
                 randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getServiceName(),
                 request.getTokenName(),
                 request.getRefreshPolicy()
             );
-            case 1:
-                return new DeleteServiceAccountTokenRequest(
+            case 1 -> new DeleteServiceAccountTokenRequest(
                 request.getNamespace(),
                 randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getTokenName(),
                 request.getRefreshPolicy()
             );
-            case 2:
-                return new DeleteServiceAccountTokenRequest(
+            case 2 -> new DeleteServiceAccountTokenRequest(
                 request.getNamespace(),
                 request.getServiceName(),
                 randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
                 request.getRefreshPolicy()
             );
-            default:
-                return new DeleteServiceAccountTokenRequest(
+            default -> new DeleteServiceAccountTokenRequest(
                 request.getNamespace(),
                 request.getServiceName(),
                 request.getTokenName(),
                 randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
             );
-        }
+        };
     }

 }
@@ -17,7 +17,6 @@ import org.elasticsearch.xcontent.XContentType;

 import java.io.IOException;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.Map;

 import static java.util.Collections.singletonList;
@@ -79,10 +78,8 @@ public class ExpressionRoleMappingTests extends ESTestCase {
     }

     private static ExpressionRoleMapping mutateTestItem(ExpressionRoleMapping original) throws IOException {
-        ExpressionRoleMapping mutated = null;
-        switch (randomIntBetween(0, 5)) {
-            case 0:
-                mutated = new ExpressionRoleMapping(
+        return switch (randomIntBetween(0, 5)) {
+            case 0 -> new ExpressionRoleMapping(
                 "namechanged",
                 FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
                 singletonList("superuser"),
@@ -90,9 +87,7 @@ public class ExpressionRoleMappingTests extends ESTestCase {
                 null,
                 true
             );
-                break;
-            case 1:
-                mutated = new ExpressionRoleMapping(
+            case 1 -> new ExpressionRoleMapping(
                 "kerberosmapping",
                 FieldRoleMapperExpression.ofKeyValues("changed", "changed"),
                 singletonList("superuser"),
@@ -100,9 +95,7 @@ public class ExpressionRoleMappingTests extends ESTestCase {
                 null,
                 true
             );
-                break;
-            case 2:
-                mutated = new ExpressionRoleMapping(
+            case 2 -> new ExpressionRoleMapping(
                 "kerberosmapping",
                 FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
                 singletonList("changed"),
@@ -110,21 +103,15 @@ public class ExpressionRoleMappingTests extends ESTestCase {
                 null,
                 true
             );
-                break;
-            case 3:
-                Map<String, Object> metadata = new HashMap<>();
-                metadata.put("a", "b");
-                mutated = new ExpressionRoleMapping(
+            case 3 -> new ExpressionRoleMapping(
                 "kerberosmapping",
                 FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
                 singletonList("superuser"),
                 Collections.emptyList(),
-                metadata,
+                Map.of("a", "b"),
                 true
             );
-                break;
-            case 4:
-                mutated = new ExpressionRoleMapping(
+            case 4 -> new ExpressionRoleMapping(
                 "kerberosmapping",
                 FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
                 Collections.emptyList(),
@@ -132,9 +119,7 @@ public class ExpressionRoleMappingTests extends ESTestCase {
                 null,
                 true
             );
-                break;
-            case 5:
-                mutated = new ExpressionRoleMapping(
+            case 5 -> new ExpressionRoleMapping(
                 "kerberosmapping",
                 FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
                 singletonList("superuser"),
@@ -142,8 +127,7 @@ public class ExpressionRoleMappingTests extends ESTestCase {
                 null,
                 false
             );
-                break;
-        }
-        return mutated;
+            default -> throw new UnsupportedOperationException();
+        };
     }
 }
@@ -104,12 +104,10 @@ public class GetApiKeyResponseTests extends ESTestCase {
             "user-b",
             "realm-y"
         );
-        switch (randomIntBetween(0, 2)) {
-            case 0:
-                return new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
-            default:
-                return new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
-        }
+        return switch (randomIntBetween(0, 2)) {
+            case 0 -> new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
+            default -> new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
+        };
     }

     private static ApiKey createApiKeyInfo(
@ -107,11 +107,10 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private static GetRoleMappingsResponse mutateTestItem(GetRoleMappingsResponse original) {
|
private static GetRoleMappingsResponse mutateTestItem(GetRoleMappingsResponse original) {
|
||||||
GetRoleMappingsResponse mutated = null;
|
ExpressionRoleMapping originalRoleMapping = original.getMappings().get(0);
|
||||||
switch (randomIntBetween(0, 1)) {
|
return switch (randomIntBetween(0, 1)) {
|
||||||
case 0:
|
case 0 -> new GetRoleMappingsResponse(
|
||||||
final List<ExpressionRoleMapping> roleMappingsList1 = new ArrayList<>();
|
List.of(
|
||||||
roleMappingsList1.add(
|
|
||||||
new ExpressionRoleMapping(
|
new ExpressionRoleMapping(
|
||||||
"ldapmapping",
|
"ldapmapping",
|
||||||
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
||||||
|
@ -120,13 +119,10 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
||||||
null,
|
null,
|
||||||
false
|
false
|
||||||
)
|
)
|
||||||
|
)
|
||||||
);
|
);
|
||||||
mutated = new GetRoleMappingsResponse(roleMappingsList1);
|
default -> new GetRoleMappingsResponse(
|
||||||
break;
|
List.of(
|
||||||
case 1:
|
|
||||||
final List<ExpressionRoleMapping> roleMappingsList2 = new ArrayList<>();
|
|
||||||
ExpressionRoleMapping originalRoleMapping = original.getMappings().get(0);
|
|
||||||
roleMappingsList2.add(
|
|
||||||
new ExpressionRoleMapping(
|
new ExpressionRoleMapping(
|
||||||
originalRoleMapping.getName(),
|
originalRoleMapping.getName(),
|
||||||
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
||||||
|
@ -135,10 +131,8 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
||||||
originalRoleMapping.getMetadata(),
|
originalRoleMapping.getMetadata(),
|
||||||
originalRoleMapping.isEnabled() == false
|
originalRoleMapping.isEnabled() == false
|
||||||
)
|
)
|
||||||
|
)
|
||||||
);
|
);
|
||||||
mutated = new GetRoleMappingsResponse(roleMappingsList2);
|
};
|
||||||
break;
|
|
||||||
}
|
|
||||||
return mutated;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -37,17 +37,15 @@ public class GetServiceAccountCredentialsRequestTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private GetServiceAccountCredentialsRequest mutateInstance(GetServiceAccountCredentialsRequest request) {
|
private GetServiceAccountCredentialsRequest mutateInstance(GetServiceAccountCredentialsRequest request) {
|
||||||
switch (randomIntBetween(0, 1)) {
|
return switch (randomIntBetween(0, 1)) {
|
||||||
case 0:
|
case 0 -> new GetServiceAccountCredentialsRequest(
|
||||||
return new GetServiceAccountCredentialsRequest(
|
|
||||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||||
request.getServiceName()
|
request.getServiceName()
|
||||||
);
|
);
|
||||||
default:
|
default -> new GetServiceAccountCredentialsRequest(
|
||||||
return new GetServiceAccountCredentialsRequest(
|
|
||||||
request.getNamespace(),
|
request.getNamespace(),
|
||||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||||
);
|
);
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -54,17 +54,15 @@ public class GetServiceAccountsRequestTests extends ESTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private GetServiceAccountsRequest mutateInstance(GetServiceAccountsRequest request) {
|
private GetServiceAccountsRequest mutateInstance(GetServiceAccountsRequest request) {
|
||||||
switch (randomIntBetween(0, 1)) {
|
return switch (randomIntBetween(0, 1)) {
|
||||||
case 0:
|
case 0 -> new GetServiceAccountsRequest(
|
||||||
return new GetServiceAccountsRequest(
|
|
||||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||||
request.getServiceName()
|
request.getServiceName()
|
||||||
);
|
);
|
||||||
default:
|
default -> new GetServiceAccountsRequest(
|
||||||
return new GetServiceAccountsRequest(
|
|
||||||
request.getNamespace(),
|
request.getNamespace(),
|
||||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||||
);
|
);
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -96,14 +96,11 @@ public class GrantApiKeyRequestTests extends ESTestCase {
     }
 
     private GrantApiKeyRequest.Grant clone(GrantApiKeyRequest.Grant grant) {
-        switch (grant.getGrantType()) {
-            case "password":
-                return GrantApiKeyRequest.Grant.passwordGrant(grant.getUsername(), grant.getPassword());
-            case "access_token":
-                return GrantApiKeyRequest.Grant.accessTokenGrant(grant.getAccessToken());
-            default:
-                throw new IllegalArgumentException("Cannot clone grant: " + Strings.toString(grant));
-        }
+        return switch (grant.getGrantType()) {
+            case "password" -> GrantApiKeyRequest.Grant.passwordGrant(grant.getUsername(), grant.getPassword());
+            case "access_token" -> GrantApiKeyRequest.Grant.accessTokenGrant(grant.getAccessToken());
+            default -> throw new IllegalArgumentException("Cannot clone grant: " + Strings.toString(grant));
+        };
     }
 
     private CreateApiKeyRequest clone(CreateApiKeyRequest apiKeyRequest) {
@@ -117,16 +114,14 @@ public class GrantApiKeyRequestTests extends ESTestCase {
     }
 
     private static GrantApiKeyRequest mutateTestItem(GrantApiKeyRequest original) {
-        switch (randomIntBetween(0, 3)) {
-            case 0:
-                return new GrantApiKeyRequest(
+        return switch (randomIntBetween(0, 3)) {
+            case 0 -> new GrantApiKeyRequest(
                 original.getGrant().getGrantType().equals("password")
                     ? GrantApiKeyRequest.Grant.accessTokenGrant(randomAlphaOfLength(24))
                     : GrantApiKeyRequest.Grant.passwordGrant(randomAlphaOfLength(8), randomAlphaOfLengthBetween(6, 12).toCharArray()),
                 original.getApiKeyRequest()
             );
-            case 1:
-                return new GrantApiKeyRequest(
+            case 1 -> new GrantApiKeyRequest(
                 original.getGrant(),
                 new CreateApiKeyRequest(
                     randomAlphaOfLengthBetween(10, 15),
@@ -136,8 +131,7 @@ public class GrantApiKeyRequestTests extends ESTestCase {
                     original.getApiKeyRequest().getMetadata()
                 )
             );
-            case 2:
-                return new GrantApiKeyRequest(
+            case 2 -> new GrantApiKeyRequest(
                 original.getGrant(),
                 new CreateApiKeyRequest(
                     original.getApiKeyRequest().getName(),
@@ -147,8 +141,7 @@ public class GrantApiKeyRequestTests extends ESTestCase {
                     original.getApiKeyRequest().getMetadata()
                 )
            );
-            case 3:
-                return new GrantApiKeyRequest(
+            case 3 -> new GrantApiKeyRequest(
                 original.getGrant(),
                 new CreateApiKeyRequest(
                     original.getApiKeyRequest().getName(),
@@ -158,8 +151,7 @@ public class GrantApiKeyRequestTests extends ESTestCase {
                     randomValueOtherThan(original.getApiKeyRequest().getMetadata(), CreateApiKeyRequestTests::randomMetadata)
                 )
             );
-            default:
-                return new GrantApiKeyRequest(
+            default -> new GrantApiKeyRequest(
                 original.getGrant(),
                 new CreateApiKeyRequest(
                     original.getApiKeyRequest().getName(),
@@ -169,6 +161,6 @@ public class GrantApiKeyRequestTests extends ESTestCase {
                     original.getApiKeyRequest().getMetadata()
                 )
             );
-        }
+        };
     }
 }

@@ -114,27 +114,23 @@ public class InvalidateApiKeyResponseTests extends ESTestCase {
     }
 
     private static InvalidateApiKeyResponse mutateTestItem(InvalidateApiKeyResponse original) {
-        switch (randomIntBetween(0, 2)) {
-            case 0:
-                return new InvalidateApiKeyResponse(
+        return switch (randomIntBetween(0, 2)) {
+            case 0 -> new InvalidateApiKeyResponse(
                 Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
                 original.getPreviouslyInvalidatedApiKeys(),
                 original.getErrors()
             );
-            case 1:
-                return new InvalidateApiKeyResponse(original.getInvalidatedApiKeys(), Collections.emptyList(), original.getErrors());
-            case 2:
-                return new InvalidateApiKeyResponse(
+            case 1 -> new InvalidateApiKeyResponse(original.getInvalidatedApiKeys(), Collections.emptyList(), original.getErrors());
+            case 2 -> new InvalidateApiKeyResponse(
                 original.getInvalidatedApiKeys(),
                 original.getPreviouslyInvalidatedApiKeys(),
                 Collections.emptyList()
            );
-            default:
-                return new InvalidateApiKeyResponse(
+            default -> new InvalidateApiKeyResponse(
                 Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
                 original.getPreviouslyInvalidatedApiKeys(),
                 original.getErrors()
             );
-        }
+        };
     }
 }

@@ -72,18 +72,13 @@ public class InvalidateTokenRequestTests extends ESTestCase {
             : InvalidateTokenRequest.refreshToken(token);
         final EqualsHashCodeTestUtils.MutateFunction<InvalidateTokenRequest> mutate = r -> {
             int randomCase = randomIntBetween(1, 4);
-            switch (randomCase) {
-                case 1:
-                    return InvalidateTokenRequest.refreshToken(randomAlphaOfLength(5));
-                case 2:
-                    return InvalidateTokenRequest.accessToken(randomAlphaOfLength(5));
-                case 3:
-                    return InvalidateTokenRequest.realmTokens(randomAlphaOfLength(5));
-                case 4:
-                    return InvalidateTokenRequest.userTokens(randomAlphaOfLength(5));
-                default:
-                    return new InvalidateTokenRequest(null, null, randomAlphaOfLength(5), randomAlphaOfLength(5));
-            }
+            return switch (randomCase) {
+                case 1 -> InvalidateTokenRequest.refreshToken(randomAlphaOfLength(5));
+                case 2 -> InvalidateTokenRequest.accessToken(randomAlphaOfLength(5));
+                case 3 -> InvalidateTokenRequest.realmTokens(randomAlphaOfLength(5));
+                case 4 -> InvalidateTokenRequest.userTokens(randomAlphaOfLength(5));
+                default -> new InvalidateTokenRequest(null, null, randomAlphaOfLength(5), randomAlphaOfLength(5));
+            };
         };
         EqualsHashCodeTestUtils.checkEqualsAndHashCode(
             request,

@@ -63,29 +63,25 @@ public class KibanaEnrollmentResponseTests extends ESTestCase {
     }
 
     private static KibanaEnrollmentResponse mutateTestItem(KibanaEnrollmentResponse original) {
-        switch (randomIntBetween(0, 3)) {
-            case 0:
-                return new KibanaEnrollmentResponse(
+        return switch (randomIntBetween(0, 3)) {
+            case 0 -> new KibanaEnrollmentResponse(
                 randomAlphaOfLengthBetween(14, 20),
                 new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
                 randomAlphaOfLength(52)
             );
-            case 1:
-                return new KibanaEnrollmentResponse(
+            case 1 -> new KibanaEnrollmentResponse(
                 original.getTokenName(),
                 new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
                 randomAlphaOfLength(52)
            );
-            case 2:
-                return new KibanaEnrollmentResponse(randomAlphaOfLengthBetween(14, 20), original.getTokenValue(), randomAlphaOfLength(52));
-            case 3:
-                return new KibanaEnrollmentResponse(
+            case 2 -> new KibanaEnrollmentResponse(randomAlphaOfLengthBetween(14, 20), original.getTokenValue(), randomAlphaOfLength(52));
+            case 3 -> new KibanaEnrollmentResponse(
                 randomAlphaOfLengthBetween(14, 20),
                 new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
                 original.getHttpCa()
             );
-        }
         // we never reach here
-        return null;
+            default -> null;
+        };
     }
 }
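A side effect of the conversion visible in the hunk just above: a switch expression must produce a value on every path, so the old "unreachable" return that followed the statement form is folded into a default arm. A minimal sketch (illustrative names, not from this commit):

    // The compiler requires every path of a switch expression to yield a value,
    // so the trailing "return null; // never reached" disappears.
    static Integer sizeOf(int kind) {
        return switch (kind) {
            case 0 -> 16;
            case 1 -> 32;
            default -> null;
        };
    }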
@@ -78,74 +78,61 @@ public class QueryApiKeyRequestTests extends ESTestCase {
     }
 
     private QueryApiKeyRequest mutateInstance(QueryApiKeyRequest request) {
-        switch (randomIntBetween(0, 5)) {
-            case 0:
-                return new QueryApiKeyRequest(
+        return switch (randomIntBetween(0, 5)) {
+            case 0 -> new QueryApiKeyRequest(
                 randomValueOtherThan(request.getQueryBuilder(), QueryApiKeyRequestTests::randomQueryBuilder),
                 request.getFrom(),
                 request.getSize(),
                 request.getFieldSortBuilders(),
                 request.getSearchAfterBuilder()
             );
-            case 1:
-                return new QueryApiKeyRequest(
+            case 1 -> new QueryApiKeyRequest(
                 request.getQueryBuilder(),
                 request.getFrom() + 1,
                 request.getSize(),
                 request.getFieldSortBuilders(),
                 request.getSearchAfterBuilder()
             );
-            case 2:
-                return new QueryApiKeyRequest(
+            case 2 -> new QueryApiKeyRequest(
                 request.getQueryBuilder(),
                 request.getFrom(),
                 request.getSize() + 1,
                 request.getFieldSortBuilders(),
                 request.getSearchAfterBuilder()
             );
-            case 3:
-                return new QueryApiKeyRequest(
+            case 3 -> new QueryApiKeyRequest(
                 request.getQueryBuilder(),
                 request.getFrom(),
                 request.getSize(),
                 randomValueOtherThan(request.getFieldSortBuilders(), QueryApiKeyRequestTests::randomFieldSortBuilders),
                 request.getSearchAfterBuilder()
             );
-            default:
-                return new QueryApiKeyRequest(
+            default -> new QueryApiKeyRequest(
                 request.getQueryBuilder(),
                 request.getFrom(),
                 request.getSize(),
                 request.getFieldSortBuilders(),
                 randomValueOtherThan(request.getSearchAfterBuilder(), QueryApiKeyRequestTests::randomSearchAfterBuilder)
             );
-        }
+        };
     }
 
     public static QueryBuilder randomQueryBuilder() {
-        switch (randomIntBetween(0, 5)) {
-            case 0:
-                return QueryBuilders.matchAllQuery();
-            case 1:
-                return QueryBuilders.termQuery(
+        return switch (randomIntBetween(0, 5)) {
+            case 0 -> QueryBuilders.matchAllQuery();
+            case 1 -> QueryBuilders.termQuery(
                 randomAlphaOfLengthBetween(3, 8),
                 randomFrom(randomAlphaOfLength(8), randomInt(), randomLong(), randomDouble(), randomFloat())
             );
-            case 2:
-                return QueryBuilders.idsQuery().addIds(randomArray(1, 5, String[]::new, () -> randomAlphaOfLength(20)));
-            case 3:
-                return QueryBuilders.prefixQuery(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8));
-            case 4:
-                return QueryBuilders.wildcardQuery(
+            case 2 -> QueryBuilders.idsQuery().addIds(randomArray(1, 5, String[]::new, () -> randomAlphaOfLength(20)));
+            case 3 -> QueryBuilders.prefixQuery(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8));
+            case 4 -> QueryBuilders.wildcardQuery(
                 randomAlphaOfLengthBetween(3, 8),
                 randomAlphaOfLengthBetween(0, 3) + "*" + randomAlphaOfLengthBetween(0, 3)
             );
-            case 5:
-                return QueryBuilders.rangeQuery(randomAlphaOfLengthBetween(3, 8)).from(randomNonNegativeLong()).to(randomNonNegativeLong());
-            default:
-                return null;
-        }
+            case 5 -> QueryBuilders.rangeQuery(randomAlphaOfLengthBetween(3, 8)).from(randomNonNegativeLong()).to(randomNonNegativeLong());
+            default -> null;
+        };
     }
 
     public static List<FieldSortBuilder> randomFieldSortBuilders() {

@@ -61,19 +61,15 @@ public class GlobalOperationPrivilegeTests extends ESTestCase {
     }
 
     private static GlobalOperationPrivilege mutateTestItem(GlobalOperationPrivilege original) {
-        switch (randomIntBetween(0, 2)) {
-            case 0:
-                return new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
-            case 1:
-                return new GlobalOperationPrivilege(original.getCategory(), randomAlphaOfLength(5), original.getRaw());
-            case 2:
-                return new GlobalOperationPrivilege(
+        return switch (randomIntBetween(0, 2)) {
+            case 0 -> new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
+            case 1 -> new GlobalOperationPrivilege(original.getCategory(), randomAlphaOfLength(5), original.getRaw());
+            case 2 -> new GlobalOperationPrivilege(
                 original.getCategory(),
                 original.getOperation(),
                 Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4))
             );
-            default:
-                return new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
-        }
+            default -> new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
+        };
     }
 }

@@ -72,23 +72,13 @@ public class TransformConfigTests extends AbstractXContentTestCase<TransformConfig> {
     public static Map<String, Object> randomMetadata() {
         return randomMap(0, 10, () -> {
             String key = randomAlphaOfLengthBetween(1, 10);
-            Object value;
-            switch (randomIntBetween(0, 3)) {
-                case 0:
-                    value = null;
-                    break;
-                case 1:
-                    value = randomLong();
-                    break;
-                case 2:
-                    value = randomAlphaOfLengthBetween(1, 10);
-                    break;
-                case 3:
-                    value = randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
-                    break;
-                default:
-                    throw new AssertionError();
-            }
+            Object value = switch (randomIntBetween(0, 3)) {
+                case 0 -> null;
+                case 1 -> randomLong();
+                case 2 -> randomAlphaOfLengthBetween(1, 10);
+                case 3 -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
+                default -> throw new AssertionError();
+            };
             return Tuple.tuple(key, value);
         });
     }

@@ -61,17 +61,13 @@ public class AggregationConfigTests extends AbstractXContentTestCase<AggregationConfig> {

     private static AggregationBuilder getRandomSupportedAggregation() {
         final int numberOfSupportedAggs = 4;
-        switch (randomIntBetween(1, numberOfSupportedAggs)) {
-            case 1:
-                return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
-            case 2:
-                return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
-            case 3:
-                return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
-            case 4:
-                return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
-        }
-
-        return null;
+        return switch (randomIntBetween(1, numberOfSupportedAggs)) {
+            case 1 -> AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
+            case 2 -> AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
+            case 3 -> AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
+            case 4 -> AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
+            default -> null;
+        };
     }
 }
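The TransformConfigTests hunk above shows the second common shape in this commit: the switch expression is used directly as an initializer, so the "declare, assign in every branch, break" pattern collapses and the compiler guarantees the local is definitely assigned. A rough sketch, with illustrative names that are not from this commit:

    // A switch expression can initialize a local directly; every arm must
    // either yield a value or throw, so no branch can leave it unassigned.
    static Object randomishValue(int pick) {
        Object value = switch (pick) {
            case 0 -> null;
            case 1 -> 42L;
            case 2 -> "text";
            default -> throw new AssertionError();
        };
        return value;
    }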
@@ -34,24 +34,13 @@ public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
         for (int i = 0; i < randomIntBetween(1, 4); ++i) {
             String targetFieldName = randomAlphaOfLengthBetween(1, 20);
             if (names.add(targetFieldName)) {
-                SingleGroupSource groupBy = null;
                 SingleGroupSource.Type type = randomFrom(SingleGroupSource.Type.values());
-                switch (type) {
-                    case TERMS:
-                        groupBy = TermsGroupSourceTests.randomTermsGroupSource();
-                        break;
-                    case HISTOGRAM:
-                        groupBy = HistogramGroupSourceTests.randomHistogramGroupSource();
-                        break;
-                    case DATE_HISTOGRAM:
-                        groupBy = DateHistogramGroupSourceTests.randomDateHistogramGroupSource();
-                        break;
-                    case GEOTILE_GRID:
-                        groupBy = GeoTileGroupSourceTests.randomGeoTileGroupSource();
-                        break;
-                    default:
-                        fail("unknown group source type, please implement tests and add support here");
-                }
+                SingleGroupSource groupBy = switch (type) {
+                    case TERMS -> TermsGroupSourceTests.randomTermsGroupSource();
+                    case HISTOGRAM -> HistogramGroupSourceTests.randomHistogramGroupSource();
+                    case DATE_HISTOGRAM -> DateHistogramGroupSourceTests.randomDateHistogramGroupSource();
+                    case GEOTILE_GRID -> GeoTileGroupSourceTests.randomGeoTileGroupSource();
+                };
                 groups.put(targetFieldName, groupBy);
             }
         }

@@ -264,15 +264,12 @@ public class InstallPluginAction implements Closeable {

     private static void handleInstallXPack(final Build.Flavor flavor) throws UserException {
         switch (flavor) {
-            case DEFAULT:
-                throw new UserException(ExitCodes.CONFIG, "this distribution of Elasticsearch contains X-Pack by default");
-            case OSS:
-                throw new UserException(
+            case DEFAULT -> throw new UserException(ExitCodes.CONFIG, "this distribution of Elasticsearch contains X-Pack by default");
+            case OSS -> throw new UserException(
                 ExitCodes.CONFIG,
                 "X-Pack is not available with the oss distribution; to use X-Pack features use the default distribution"
             );
-            case UNKNOWN:
-                throw new IllegalStateException("your distribution is broken");
+            case UNKNOWN -> throw new IllegalStateException("your distribution is broken");
         }
     }

@@ -118,16 +118,10 @@ public final class Booleans {
         if (value == null) {
             return defaultValue;
         }
-        switch (value) {
-            case "false":
-            case "0":
-            case "off":
-            case "no":
-                return false;
-
-            default:
-                return true;
-        }
+        return switch (value) {
+            case "false", "0", "off", "no" -> false;
+            default -> true;
+        };
     }
 
     /**

@@ -318,24 +318,15 @@ public class TimeValue implements Comparable<TimeValue> {
         if (duration < 0) {
             return Long.toString(duration);
         }
-        switch (timeUnit) {
-            case NANOSECONDS:
-                return duration + "nanos";
-            case MICROSECONDS:
-                return duration + "micros";
-            case MILLISECONDS:
-                return duration + "ms";
-            case SECONDS:
-                return duration + "s";
-            case MINUTES:
-                return duration + "m";
-            case HOURS:
-                return duration + "h";
-            case DAYS:
-                return duration + "d";
-            default:
-                throw new IllegalArgumentException("unknown time unit: " + timeUnit.name());
-        }
+        return switch (timeUnit) {
+            case NANOSECONDS -> duration + "nanos";
+            case MICROSECONDS -> duration + "micros";
+            case MILLISECONDS -> duration + "ms";
+            case SECONDS -> duration + "s";
+            case MINUTES -> duration + "m";
+            case HOURS -> duration + "h";
+            case DAYS -> duration + "d";
+        };
     }
 
     public static TimeValue parseTimeValue(String sValue, String settingName) {

@@ -60,22 +60,13 @@ final class DissectMatch {
             return;
         }
         switch (key.getModifier()) {
-            case NONE:
-                simpleResults.put(key.getName(), value);
-                break;
-            case APPEND:
-                appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator)).addValue(value, implicitAppendOrder++);
-                break;
-            case APPEND_WITH_ORDER:
-                appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
+            case NONE -> simpleResults.put(key.getName(), value);
+            case APPEND -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
+                .addValue(value, implicitAppendOrder++);
+            case APPEND_WITH_ORDER -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
                 .addValue(value, key.getAppendPosition());
-                break;
-            case FIELD_NAME:
-                referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
-                break;
-            case FIELD_VALUE:
-                referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
-                break;
+            case FIELD_NAME -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
+            case FIELD_VALUE -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
         }
     }

@@ -506,17 +506,13 @@ public class WellKnownText {
     }
 
     private static String tokenString(StreamTokenizer stream) {
-        switch (stream.ttype) {
-            case StreamTokenizer.TT_WORD:
-                return stream.sval;
-            case StreamTokenizer.TT_EOF:
-                return EOF;
-            case StreamTokenizer.TT_EOL:
-                return EOL;
-            case StreamTokenizer.TT_NUMBER:
-                return NUMBER;
-        }
-        return "'" + (char) stream.ttype + "'";
+        return switch (stream.ttype) {
+            case StreamTokenizer.TT_WORD -> stream.sval;
+            case StreamTokenizer.TT_EOF -> EOF;
+            case StreamTokenizer.TT_EOL -> EOL;
+            case StreamTokenizer.TT_NUMBER -> NUMBER;
+            default -> "'" + (char) stream.ttype + "'";
+        };
    }
 
     private static boolean isNumberNext(StreamTokenizer stream) throws IOException {

@@ -58,22 +58,15 @@ enum GrokCaptureType {
     abstract <T> T nativeExtracter(int[] backRefs, NativeExtracterMap<T> map);
 
     static GrokCaptureType fromString(String str) {
-        switch (str) {
-            case "string":
-                return STRING;
-            case "int":
-                return INTEGER;
-            case "long":
-                return LONG;
-            case "float":
-                return FLOAT;
-            case "double":
-                return DOUBLE;
-            case "boolean":
-                return BOOLEAN;
-            default:
-                return STRING;
-        }
+        return switch (str) {
+            case "string" -> STRING;
+            case "int" -> INTEGER;
+            case "long" -> LONG;
+            case "float" -> FLOAT;
+            case "double" -> DOUBLE;
+            case "boolean" -> BOOLEAN;
+            default -> STRING;
+        };
     }
 
     protected final GrokCaptureExtracter rawExtracter(int[] backRefs, Consumer<? super String> emit) {
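The GroupConfigTests and TimeValue hunks above also rely on the exhaustiveness check the commit message calls out: when a switch expression covers every constant of an enum, the default (or fail) branch can simply be dropped, and the compiler rejects the switch if a constant is later added without a matching case. A minimal sketch with a local enum, not taken from this commit:

    enum Side { FRONT, BACK }

    static int offset(Side side) {
        return switch (side) {
            case FRONT -> 0;
            case BACK -> 1;
            // no default: exhaustiveness is verified at compile time
        };
    }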
@@ -74,27 +74,15 @@ enum LZ4SafeUtils {
             int dec = 0;
             assert dOff >= matchOff && dOff - matchOff < 8;
             switch (dOff - matchOff) {
-                case 1:
-                    matchOff -= 3;
-                    break;
-                case 2:
-                    matchOff -= 2;
-                    break;
-                case 3:
+                case 1 -> matchOff -= 3;
+                case 2 -> matchOff -= 2;
+                case 3 -> {
                     matchOff -= 3;
                     dec = -1;
-                    break;
-                case 5:
-                    dec = 1;
-                    break;
-                case 6:
-                    dec = 2;
-                    break;
-                case 7:
-                    dec = 3;
-                    break;
-                default:
-                    break;
+                }
+                case 5 -> dec = 1;
+                case 6 -> dec = 2;
+                case 7 -> dec = 3;
             }
 
             copy4Bytes(dest, matchOff, dest, dOff);

@@ -268,17 +268,15 @@ public final class DerParser {
         }
         StringBuilder sb = new StringBuilder(64);
         switch (value[0] / 40) {
-            case 0:
-                sb.append('0');
-                break;
-            case 1:
+            case 0 -> sb.append('0');
+            case 1 -> {
                 sb.append('1');
                 value[0] -= 40;
-                break;
-            default:
+            }
+            default -> {
                 sb.append('2');
                 value[0] -= 80;
-                break;
+            }
         }
         int oidPart = 0;
         for (int i = 0; i < length; i++) {

@@ -654,17 +654,14 @@ public final class PemUtils {
         DerParser.Asn1Object algSequence = parser.readAsn1Object();
         parser = algSequence.getParser();
         String oidString = parser.readAsn1Object().getOid();
-        switch (oidString) {
-            case "1.2.840.10040.4.1": return "DSA";
-            case "1.2.840.113549.1.1.1": return "RSA";
-            case "1.2.840.10045.2.1": return "EC";
-        }
-        throw new GeneralSecurityException(
+        return switch (oidString) {
+            case "1.2.840.10040.4.1" -> "DSA";
+            case "1.2.840.113549.1.1.1" -> "RSA";
+            case "1.2.840.10045.2.1" -> "EC";
+            default -> throw new GeneralSecurityException(
                 "Error parsing key algorithm identifier. Algorithm with OID [" + oidString + "] is not supported"
             );
+        };
     }
 
     public static List<Certificate> readCertificates(Collection<Path> certPaths) throws CertificateException, IOException {
@@ -683,92 +680,55 @@ public final class PemUtils {
     }
 
     private static String getAlgorithmNameFromOid(String oidString) throws GeneralSecurityException {
-        switch (oidString) {
-            case "1.2.840.10040.4.1": return "DSA";
-            case "1.2.840.113549.1.1.1": return "RSA";
-            case "1.2.840.10045.2.1": return "EC";
-            case "1.3.14.3.2.7": return "DES-CBC";
-            case "2.16.840.1.101.3.4.1.1": return "AES-128_ECB";
-            case "2.16.840.1.101.3.4.1.2": return "AES-128_CBC";
-            case "2.16.840.1.101.3.4.1.3": return "AES-128_OFB";
-            case "2.16.840.1.101.3.4.1.4": return "AES-128_CFB";
-            case "2.16.840.1.101.3.4.1.6": return "AES-128_GCM";
-            case "2.16.840.1.101.3.4.1.21": return "AES-192_ECB";
-            case "2.16.840.1.101.3.4.1.22": return "AES-192_CBC";
-            case "2.16.840.1.101.3.4.1.23": return "AES-192_OFB";
-            case "2.16.840.1.101.3.4.1.24": return "AES-192_CFB";
-            case "2.16.840.1.101.3.4.1.26": return "AES-192_GCM";
-            case "2.16.840.1.101.3.4.1.41": return "AES-256_ECB";
-            case "2.16.840.1.101.3.4.1.42": return "AES-256_CBC";
-            case "2.16.840.1.101.3.4.1.43": return "AES-256_OFB";
-            case "2.16.840.1.101.3.4.1.44": return "AES-256_CFB";
-            case "2.16.840.1.101.3.4.1.46": return "AES-256_GCM";
-            case "2.16.840.1.101.3.4.1.5": return "AESWrap-128";
-            case "2.16.840.1.101.3.4.1.25": return "AESWrap-192";
-            case "2.16.840.1.101.3.4.1.45": return "AESWrap-256";
-        }
-        return null;
+        return switch (oidString) {
+            case "1.2.840.10040.4.1" -> "DSA";
+            case "1.2.840.113549.1.1.1" -> "RSA";
+            case "1.2.840.10045.2.1" -> "EC";
+            case "1.3.14.3.2.7" -> "DES-CBC";
+            case "2.16.840.1.101.3.4.1.1" -> "AES-128_ECB";
+            case "2.16.840.1.101.3.4.1.2" -> "AES-128_CBC";
+            case "2.16.840.1.101.3.4.1.3" -> "AES-128_OFB";
+            case "2.16.840.1.101.3.4.1.4" -> "AES-128_CFB";
+            case "2.16.840.1.101.3.4.1.6" -> "AES-128_GCM";
+            case "2.16.840.1.101.3.4.1.21" -> "AES-192_ECB";
+            case "2.16.840.1.101.3.4.1.22" -> "AES-192_CBC";
+            case "2.16.840.1.101.3.4.1.23" -> "AES-192_OFB";
+            case "2.16.840.1.101.3.4.1.24" -> "AES-192_CFB";
+            case "2.16.840.1.101.3.4.1.26" -> "AES-192_GCM";
+            case "2.16.840.1.101.3.4.1.41" -> "AES-256_ECB";
+            case "2.16.840.1.101.3.4.1.42" -> "AES-256_CBC";
+            case "2.16.840.1.101.3.4.1.43" -> "AES-256_OFB";
+            case "2.16.840.1.101.3.4.1.44" -> "AES-256_CFB";
+            case "2.16.840.1.101.3.4.1.46" -> "AES-256_GCM";
+            case "2.16.840.1.101.3.4.1.5" -> "AESWrap-128";
+            case "2.16.840.1.101.3.4.1.25" -> "AESWrap-192";
+            case "2.16.840.1.101.3.4.1.45" -> "AESWrap-256";
+            default -> null;
+        };
     }
 
     private static String getEcCurveNameFromOid(String oidString) throws GeneralSecurityException {
-        switch (oidString) {
+        return switch (oidString) {
             // see https://tools.ietf.org/html/rfc5480#section-2.1.1.1
-            case "1.2.840.10045.3.1": return "secp192r1";
-            case "1.3.132.0.1": return "sect163k1";
-            case "1.3.132.0.15": return "sect163r2";
-            case "1.3.132.0.33": return "secp224r1";
-            case "1.3.132.0.26": return "sect233k1";
-            case "1.3.132.0.27": return "sect233r1";
-            case "1.2.840.10045.3.1.7": return "secp256r1";
-            case "1.3.132.0.16": return "sect283k1";
-            case "1.3.132.0.17": return "sect283r1";
-            case "1.3.132.0.34": return "secp384r1";
-            case "1.3.132.0.36": return "sect409k1";
-            case "1.3.132.0.37": return "sect409r1";
-            case "1.3.132.0.35": return "secp521r1";
-            case "1.3.132.0.38": return "sect571k1";
-            case "1.3.132.0.39": return "sect571r1";
-        }
-        throw new GeneralSecurityException(
+            case "1.2.840.10045.3.1" -> "secp192r1";
+            case "1.3.132.0.1" -> "sect163k1";
+            case "1.3.132.0.15" -> "sect163r2";
+            case "1.3.132.0.33" -> "secp224r1";
+            case "1.3.132.0.26" -> "sect233k1";
+            case "1.3.132.0.27" -> "sect233r1";
+            case "1.2.840.10045.3.1.7" -> "secp256r1";
+            case "1.3.132.0.16" -> "sect283k1";
+            case "1.3.132.0.17" -> "sect283r1";
+            case "1.3.132.0.34" -> "secp384r1";
+            case "1.3.132.0.36" -> "sect409k1";
+            case "1.3.132.0.37" -> "sect409r1";
+            case "1.3.132.0.35" -> "secp521r1";
+            case "1.3.132.0.38" -> "sect571k1";
+            case "1.3.132.0.39" -> "sect571r1";
+            default -> throw new GeneralSecurityException(
                 "Error parsing EC named curve identifier. Named curve with OID: " + oidString + " is not supported"
             );
+        };
     }
 
 }
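As in the DerParser hunk above, an arrow case that needs several statements takes a block instead of relying on fall-through and break. A minimal sketch of the shape (illustrative method, not part of this commit); in a switch statement the block simply runs, while in a switch expression a block would end with yield to produce the value:

    static void appendPrefix(StringBuilder sb, int[] value) {
        switch (value[0] / 40) {
            case 0 -> sb.append('0');
            case 1 -> {
                // multi-statement arm: braces, no break needed
                sb.append('1');
                value[0] -= 40;
            }
            default -> {
                sb.append('2');
                value[0] -= 80;
            }
        }
    }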
@@ -91,10 +91,13 @@ public class SslConfigurationTests extends ESTestCase {
                 orig.getCipherSuites(),
                 orig.getSupportedProtocols()
             ),
-            orig -> {
-                switch (randomIntBetween(1, 4)) {
-                    case 1:
-                        return new SslConfiguration(
+            this::mutateSslConfiguration
+        );
+    }
+
+    private SslConfiguration mutateSslConfiguration(SslConfiguration orig) {
+        return switch (randomIntBetween(1, 4)) {
+            case 1 -> new SslConfiguration(
                 true,
                 orig.getTrustConfig(),
                 orig.getKeyConfig(),
@@ -103,8 +106,7 @@ public class SslConfigurationTests extends ESTestCase {
                 orig.getCipherSuites(),
                 orig.getSupportedProtocols()
             );
-                    case 2:
-                        return new SslConfiguration(
+            case 2 -> new SslConfiguration(
                 true,
                 orig.getTrustConfig(),
                 orig.getKeyConfig(),
@@ -113,8 +115,7 @@ public class SslConfigurationTests extends ESTestCase {
                 orig.getCipherSuites(),
                 orig.getSupportedProtocols()
             );
-                    case 3:
-                        return new SslConfiguration(
+            case 3 -> new SslConfiguration(
                 true,
                 orig.getTrustConfig(),
                 orig.getKeyConfig(),
@@ -123,9 +124,7 @@ public class SslConfigurationTests extends ESTestCase {
                 DEFAULT_CIPHERS,
                 orig.getSupportedProtocols()
             );
-                    case 4:
-                    default:
-                        return new SslConfiguration(
+            default -> new SslConfiguration(
                 true,
                 orig.getTrustConfig(),
                 orig.getKeyConfig(),
@@ -134,9 +133,7 @@ public class SslConfigurationTests extends ESTestCase {
                 orig.getCipherSuites(),
                 Arrays.asList(VALID_PROTOCOLS)
             );
-                }
-            }
-        );
+        };
     }
 
     public void testDependentFiles() {

@@ -121,26 +121,13 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
         return (objectParser, field, location, parser, value, context) -> {
             XContentParser.Token t = parser.currentToken();
             switch (t) {
-                case VALUE_STRING:
-                    consumer.accept(value, field, parser.text());
-                    break;
-                case VALUE_NUMBER:
-                    consumer.accept(value, field, parser.numberValue());
-                    break;
-                case VALUE_BOOLEAN:
-                    consumer.accept(value, field, parser.booleanValue());
-                    break;
-                case VALUE_NULL:
-                    consumer.accept(value, field, null);
-                    break;
-                case START_OBJECT:
-                    consumer.accept(value, field, parser.map());
-                    break;
-                case START_ARRAY:
-                    consumer.accept(value, field, parser.list());
-                    break;
-                default:
-                    throw new XContentParseException(
+                case VALUE_STRING -> consumer.accept(value, field, parser.text());
+                case VALUE_NUMBER -> consumer.accept(value, field, parser.numberValue());
+                case VALUE_BOOLEAN -> consumer.accept(value, field, parser.booleanValue());
+                case VALUE_NULL -> consumer.accept(value, field, null);
+                case START_OBJECT -> consumer.accept(value, field, parser.map());
+                case START_ARRAY -> consumer.accept(value, field, parser.list());
+                default -> throw new XContentParseException(
                     parser.getTokenLocation(),
                     "[" + objectParser.name + "] cannot parse field [" + field + "] with value type [" + t + "]"
                 );
@@ -642,7 +629,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
     private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) {
         final XContentParser.Token token = parser.currentToken();
         switch (token) {
-            case START_OBJECT:
+            case START_OBJECT -> {
                 parseValue(parser, fieldParser, currentFieldName, value, context);
                 /*
                  * Well behaving parsers should consume the entire object but
@@ -655,8 +642,8 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
                 if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
                     throwMustEndOn(currentFieldName, XContentParser.Token.END_OBJECT);
                 }
-                break;
-            case START_ARRAY:
+            }
+            case START_ARRAY -> {
                 parseArray(parser, fieldParser, currentFieldName, value, context);
                 /*
                  * Well behaving parsers should consume the entire array but
@@ -669,17 +656,15 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
                 if (parser.currentToken() != XContentParser.Token.END_ARRAY) {
                     throwMustEndOn(currentFieldName, XContentParser.Token.END_ARRAY);
                 }
-                break;
-            case END_OBJECT:
-            case END_ARRAY:
-            case FIELD_NAME:
-                throw throwUnexpectedToken(parser, token);
-            case VALUE_STRING:
-            case VALUE_NUMBER:
-            case VALUE_BOOLEAN:
-            case VALUE_EMBEDDED_OBJECT:
-            case VALUE_NULL:
-                parseValue(parser, fieldParser, currentFieldName, value, context);
+            }
+            case END_OBJECT, END_ARRAY, FIELD_NAME -> throw throwUnexpectedToken(parser, token);
+            case VALUE_STRING, VALUE_NUMBER, VALUE_BOOLEAN, VALUE_EMBEDDED_OBJECT, VALUE_NULL -> parseValue(
+                parser,
+                fieldParser,
+                currentFieldName,
+                value,
+                context
+            );
         }
     }

@@ -137,18 +137,10 @@ public interface XContentGenerator extends Closeable, Flushable {
                 break;
             case VALUE_NUMBER:
                 switch (parser.numberType()) {
-                    case INT:
-                        writeNumber(parser.intValue());
-                        break;
-                    case LONG:
-                        writeNumber(parser.longValue());
-                        break;
-                    case FLOAT:
-                        writeNumber(parser.floatValue());
-                        break;
-                    case DOUBLE:
-                        writeNumber(parser.doubleValue());
-                        break;
+                    case INT -> writeNumber(parser.intValue());
+                    case LONG -> writeNumber(parser.longValue());
+                    case FLOAT -> writeNumber(parser.floatValue());
+                    case DOUBLE -> writeNumber(parser.doubleValue());
                 }
                 break;
             case VALUE_BOOLEAN:

@@ -413,21 +413,21 @@ public class JsonXContentGenerator implements XContentGenerator {
         }
 
         switch (token) {
-            case START_ARRAY:
+            case START_ARRAY -> {
                 destination.writeStartArray();
                 while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                     copyCurrentStructure(destination, parser);
                 }
                 destination.writeEndArray();
-                break;
-            case START_OBJECT:
+            }
+            case START_OBJECT -> {
                 destination.writeStartObject();
                 while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                     copyCurrentStructure(destination, parser);
                 }
                 destination.writeEndObject();
-                break;
-            default: // others are simple:
+            }
+            default -> // others are simple:
                 destination.copyCurrentEvent(parser);
         }
     }
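The ObjectParser and JsonXContentGenerator hunks above show arrow labels in plain switch statements: each arm is a single statement or a block, never falls through, and every trailing break disappears. A minimal sketch of the shape, with illustrative names not taken from this commit:

    static void copyToken(StringBuilder out, String token) {
        switch (token) {
            case "start" -> out.append('{');
            case "end" -> out.append('}');
            default -> {
                // block arm: several statements, still no fall-through
                out.append(token);
                out.append(' ');
            }
        }
    }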
@ -189,21 +189,14 @@ public class JsonXContentParser extends AbstractXContentParser {
|
||||||
}
|
}
|
||||||
|
|
||||||
private NumberType convertNumberType(JsonParser.NumberType numberType) {
|
private NumberType convertNumberType(JsonParser.NumberType numberType) {
|
||||||
switch (numberType) {
|
return switch (numberType) {
|
||||||
case INT:
|
case INT -> NumberType.INT;
|
||||||
return NumberType.INT;
|
case BIG_INTEGER -> NumberType.BIG_INTEGER;
|
||||||
case BIG_INTEGER:
|
case LONG -> NumberType.LONG;
|
||||||
return NumberType.BIG_INTEGER;
|
case FLOAT -> NumberType.FLOAT;
|
||||||
case LONG:
|
case DOUBLE -> NumberType.DOUBLE;
|
||||||
return NumberType.LONG;
|
case BIG_DECIMAL -> NumberType.BIG_DECIMAL;
|
||||||
case FLOAT:
|
};
|
||||||
return NumberType.FLOAT;
|
|
||||||
case DOUBLE:
|
|
||||||
return NumberType.DOUBLE;
|
|
||||||
case BIG_DECIMAL:
|
|
||||||
return NumberType.BIG_DECIMAL;
|
|
||||||
}
|
|
||||||
throw new IllegalStateException("No matching token for number_type [" + numberType + "]");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private Token convertToken(JsonToken token) {
|
private Token convertToken(JsonToken token) {
|
||||||
|
|
|
@ -81,14 +81,11 @@ public abstract class AbstractXContentParser implements XContentParser {
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public boolean isBooleanValue() throws IOException {
|
public boolean isBooleanValue() throws IOException {
|
||||||
switch (currentToken()) {
|
return switch (currentToken()) {
|
||||||
case VALUE_BOOLEAN:
|
case VALUE_BOOLEAN -> true;
|
||||||
return true;
|
case VALUE_STRING -> Booleans.isBoolean(textCharacters(), textOffset(), textLength());
|
||||||
case VALUE_STRING:
|
default -> false;
|
||||||
return Booleans.isBoolean(textCharacters(), textOffset(), textLength());
|
};
|
||||||
default:
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -138,10 +138,8 @@ public class MapXContentParserTests extends ESTestCase {
|
||||||
assertEquals(parser.textOrNull(), mapParser.textOrNull());
|
assertEquals(parser.textOrNull(), mapParser.textOrNull());
|
||||||
}
|
}
|
||||||
switch (token) {
|
switch (token) {
|
||||||
case VALUE_STRING:
|
case VALUE_STRING -> assertEquals(parser.text(), mapParser.text());
|
||||||
assertEquals(parser.text(), mapParser.text());
|
case VALUE_NUMBER -> {
|
||||||
break;
|
|
||||||
case VALUE_NUMBER:
|
|
||||||
assertEquals(parser.numberType(), mapParser.numberType());
|
assertEquals(parser.numberType(), mapParser.numberType());
|
||||||
assertEquals(parser.numberValue(), mapParser.numberValue());
|
assertEquals(parser.numberValue(), mapParser.numberValue());
|
||||||
if (parser.numberType() == XContentParser.NumberType.LONG
|
if (parser.numberType() == XContentParser.NumberType.LONG
|
||||||
|
@ -156,16 +154,10 @@ public class MapXContentParserTests extends ESTestCase {
|
||||||
} else {
|
} else {
|
||||||
assertEquals(parser.doubleValue(), mapParser.doubleValue(), 0.000001);
|
assertEquals(parser.doubleValue(), mapParser.doubleValue(), 0.000001);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case VALUE_BOOLEAN:
|
case VALUE_BOOLEAN -> assertEquals(parser.booleanValue(), mapParser.booleanValue());
|
||||||
assertEquals(parser.booleanValue(), mapParser.booleanValue());
|
case VALUE_EMBEDDED_OBJECT -> assertArrayEquals(parser.binaryValue(), mapParser.binaryValue());
|
||||||
break;
|
case VALUE_NULL -> assertNull(mapParser.textOrNull());
|
||||||
case VALUE_EMBEDDED_OBJECT:
|
|
||||||
assertArrayEquals(parser.binaryValue(), mapParser.binaryValue());
|
|
||||||
break;
|
|
||||||
case VALUE_NULL:
|
|
||||||
assertNull(mapParser.textOrNull());
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
assertEquals(parser.currentName(), mapParser.currentName());
|
assertEquals(parser.currentName(), mapParser.currentName());
|
||||||
assertEquals(parser.isClosed(), mapParser.isClosed());
|
assertEquals(parser.isClosed(), mapParser.isClosed());
|
||||||
|
|
|
@ -69,20 +69,9 @@ public class XContentParserTests extends ESTestCase {
|
||||||
assertEquals(value, number.floatValue(), 0.0f);
|
assertEquals(value, number.floatValue(), 0.0f);
|
||||||
|
|
||||||
switch (xContentType) {
|
switch (xContentType) {
|
||||||
case VND_CBOR:
|
case VND_CBOR, VND_SMILE, CBOR, SMILE -> assertThat(number, instanceOf(Float.class));
|
||||||
case VND_SMILE:
|
case VND_JSON, VND_YAML, JSON, YAML -> assertThat(number, instanceOf(Double.class));
|
||||||
case CBOR:
|
default -> throw new AssertionError("unexpected x-content type [" + xContentType + "]");
|
||||||
case SMILE:
|
|
||||||
assertThat(number, instanceOf(Float.class));
|
|
||||||
break;
|
|
||||||
case VND_JSON:
|
|
||||||
case VND_YAML:
|
|
||||||
case JSON:
|
|
||||||
case YAML:
|
|
||||||
assertThat(number, instanceOf(Double.class));
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw new AssertionError("unexpected x-content type [" + xContentType + "]");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -122,18 +122,12 @@ public abstract class AbstractXContentFilteringTestCase extends AbstractFilterin
}
}
assertThat(token1, equalTo(token2));
assertThat(token1, equalTo(token2));
switch (token1) {
switch (token1) {
case FIELD_NAME:
case FIELD_NAME -> assertThat(jsonParser.currentName(), equalTo(testParser.currentName()));
assertThat(jsonParser.currentName(), equalTo(testParser.currentName()));
case VALUE_STRING -> assertThat(jsonParser.text(), equalTo(testParser.text()));
break;
case VALUE_NUMBER -> {
case VALUE_STRING:
assertThat(jsonParser.text(), equalTo(testParser.text()));
break;
case VALUE_NUMBER:
assertThat(jsonParser.numberType(), equalTo(testParser.numberType()));
assertThat(jsonParser.numberType(), equalTo(testParser.numberType()));
assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue()));
assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue()));
break;
}
default:
break;
}
}
}
}
} catch (Exception e) {
} catch (Exception e) {

@ -203,24 +203,16 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
if (results == null) {
if (results == null) {
return emptyMap();
return emptyMap();
}
}
switch (element) {
return switch (element) {
case "counts":
case "counts" -> results.getFieldCounts();
return results.getFieldCounts();
case "means" -> results.getMeans();
case "means":
case "variances" -> results.getVariances();
return results.getMeans();
case "skewness" -> results.getSkewness();
case "variances":
case "kurtosis" -> results.getKurtosis();
return results.getVariances();
case "covariance" -> results.getCovariances();
case "skewness":
case "correlation" -> results.getCorrelations();
return results.getSkewness();
default -> throw new IllegalArgumentException("Found unknown path element [" + element + "] in [" + getName() + "]");
case "kurtosis":
};
return results.getKurtosis();
case "covariance":
return results.getCovariances();
case "correlation":
return results.getCorrelations();
default:
throw new IllegalArgumentException("Found unknown path element [" + element + "] in [" + getName() + "]");
}
} else {
} else {
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
}
}

@ -47,23 +47,12 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory {
tokenizeOnChars.add((int) parseEscapedChar(c));
tokenizeOnChars.add((int) parseEscapedChar(c));
} else {
} else {
switch (c) {
switch (c) {
case "letter":
case "letter" -> tokenizeOnLetter = true;
tokenizeOnLetter = true;
case "digit" -> tokenizeOnDigit = true;
break;
case "whitespace" -> tokenizeOnSpace = true;
case "digit":
case "punctuation" -> tokenizeOnPunctuation = true;
tokenizeOnDigit = true;
case "symbol" -> tokenizeOnSymbol = true;
break;
default -> throw new RuntimeException("Invalid escaped char in [" + c + "]");
case "whitespace":
tokenizeOnSpace = true;
break;
case "punctuation":
tokenizeOnPunctuation = true;
break;
case "symbol":
tokenizeOnSymbol = true;
break;
default:
throw new RuntimeException("Invalid escaped char in [" + c + "]");
}
}
}
}
}
}

@ -38,14 +38,11 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
}
}

static int parseSide(String side) {
static int parseSide(String side) {
switch (side) {
return switch (side) {
case "front":
case "front" -> SIDE_FRONT;
return SIDE_FRONT;
case "back" -> SIDE_BACK;
case "back":
default -> throw new IllegalArgumentException("invalid side: " + side);
return SIDE_BACK;
};
default:
throw new IllegalArgumentException("invalid side: " + side);
}
}
}

@Override
@Override

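parseSide shows the other common shape in this commit: a method that used to return from each case now returns the switch expression itself, and the default arm throws. A hedged, self-contained sketch of the same shape (the side constants below are illustrative, not the real EdgeNGram ones):

public class ReturnSwitchSketch {
    static final int SIDE_FRONT = 1;
    static final int SIDE_BACK = 2;

    // The whole switch is an expression, so the method body is a single
    // return; the default arm throws instead of falling out of the switch.
    static int parseSide(String side) {
        return switch (side) {
            case "front" -> SIDE_FRONT;
            case "back" -> SIDE_BACK;
            default -> throw new IllegalArgumentException("invalid side: " + side);
        };
    }

    public static void main(String[] args) {
        System.out.println(parseSide("front")); // 1
        System.out.println(parseSide("back"));  // 2
    }
}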
@ -75,29 +75,17 @@ public class MappingCharFilterFactory extends AbstractCharFilterFactory implemen
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
c = s.charAt(readPos++);
c = s.charAt(readPos++);
switch (c) {
switch (c) {
case '\\':
case '\\' -> c = '\\';
c = '\\';
case 'n' -> c = '\n';
break;
case 't' -> c = '\t';
case 'n':
case 'r' -> c = '\r';
c = '\n';
case 'b' -> c = '\b';
break;
case 'f' -> c = '\f';
case 't':
case 'u' -> {
c = '\t';
break;
case 'r':
c = '\r';
break;
case 'b':
c = '\b';
break;
case 'f':
c = '\f';
break;
case 'u':
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
readPos += 4;
readPos += 4;
break;
}
}
}
}
}
out[writePos++] = c;
out[writePos++] = c;

@ -152,29 +152,17 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
c = s.charAt(readPos++);
c = s.charAt(readPos++);
switch (c) {
switch (c) {
case '\\':
case '\\' -> c = '\\';
c = '\\';
case 'n' -> c = '\n';
break;
case 't' -> c = '\t';
case 'n':
case 'r' -> c = '\r';
c = '\n';
case 'b' -> c = '\b';
break;
case 'f' -> c = '\f';
case 't':
case 'u' -> {
c = '\t';
break;
case 'r':
c = '\r';
break;
case 'b':
c = '\b';
break;
case 'f':
c = '\f';
break;
case 'u':
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
readPos += 4;
readPos += 4;
break;
}
}
}
}
}
out[writePos++] = c;
out[writePos++] = c;

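The two escape-parsing hunks above keep a plain switch statement but move to arrow arms, using a braced block for the multi-line 'u' case. A minimal sketch of that shape, with a hypothetical unescape helper that is not part of the commit:

public class ArrowStatementSketch {
    // Arrow arms work in switch statements too: each arm is a single
    // statement or a braced block, and no break is needed because
    // arrow arms never fall through.
    static char unescape(char c, String hex) {
        char out;
        switch (c) {
            case 'n' -> out = '\n';
            case 't' -> out = '\t';
            case 'u' -> {
                // multi-statement arms use a block instead of break
                out = (char) Integer.parseInt(hex, 16);
            }
            default -> out = c;
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println((int) unescape('n', null)); // 10
        System.out.println(unescape('u', "0041"));     // A
    }
}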
@ -226,25 +226,21 @@ public final class CommunityIdProcessor extends AbstractProcessor {
flow.protocol = Transport.fromObject(protocol);
flow.protocol = Transport.fromObject(protocol);

switch (flow.protocol) {
switch (flow.protocol) {
case Tcp:
case Tcp, Udp, Sctp -> {
case Udp:
case Sctp:
flow.sourcePort = parseIntFromObjectOrString(sourcePort.get(), "source port");
flow.sourcePort = parseIntFromObjectOrString(sourcePort.get(), "source port");
if (flow.sourcePort < 1 || flow.sourcePort > 65535) {
if (flow.sourcePort < 1 || flow.sourcePort > 65535) {
throw new IllegalArgumentException("invalid source port [" + sourcePort.get() + "]");
throw new IllegalArgumentException("invalid source port [" + sourcePort.get() + "]");
}
}

flow.destinationPort = parseIntFromObjectOrString(destinationPort.get(), "destination port");
flow.destinationPort = parseIntFromObjectOrString(destinationPort.get(), "destination port");
if (flow.destinationPort < 1 || flow.destinationPort > 65535) {
if (flow.destinationPort < 1 || flow.destinationPort > 65535) {
throw new IllegalArgumentException("invalid destination port [" + destinationPort.get() + "]");
throw new IllegalArgumentException("invalid destination port [" + destinationPort.get() + "]");
}
}
break;
}
case Icmp:
case Icmp, IcmpIpV6 -> {
case IcmpIpV6:
// tolerate missing or invalid ICMP types and codes
// tolerate missing or invalid ICMP types and codes
flow.icmpType = parseIntFromObjectOrString(icmpType, "icmp type");
flow.icmpType = parseIntFromObjectOrString(icmpType, "icmp type");
flow.icmpCode = parseIntFromObjectOrString(icmpCode, "icmp code");
flow.icmpCode = parseIntFromObjectOrString(icmpCode, "icmp code");
break;
}
}
}

return flow;
return flow;

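The flow hunk above switches over the Transport enum with arrow arms but stays a statement switch, so it may legitimately ignore the remaining enum constants; only a switch expression would force exhaustiveness. A small sketch of that difference, with an invented Proto enum rather than the real Transport type:

public class StatementVsExpressionSketch {
    enum Proto { TCP, UDP, ICMP, GRE }

    public static void main(String[] args) {
        Proto proto = Proto.GRE;

        // Statement switch: arrow arms, but unhandled constants (ICMP, GRE)
        // simply do nothing; the compiler does not require a default.
        switch (proto) {
            case TCP, UDP -> System.out.println("has ports");
        }

        // Expression switch: every constant must be covered (or a default
        // supplied), otherwise this does not compile.
        boolean hasPorts = switch (proto) {
            case TCP, UDP -> true;
            case ICMP, GRE -> false;
        };
        System.out.println(hasPorts); // false
    }
}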
@ -441,30 +437,19 @@ public final class CommunityIdProcessor extends AbstractProcessor {
}
}

public static Transport fromNumber(int transportNumber) {
public static Transport fromNumber(int transportNumber) {
switch (transportNumber) {
return switch (transportNumber) {
case 1:
case 1 -> Icmp;
return Icmp;
case 2 -> Igmp;
case 2:
case 6 -> Tcp;
return Igmp;
case 17 -> Udp;
case 6:
case 47 -> Gre;
return Tcp;
case 58 -> IcmpIpV6;
case 17:
case 88 -> Eigrp;
return Udp;
case 89 -> Ospf;
case 47:
case 103 -> Pim;
return Gre;
case 132 -> Sctp;
case 58:
default -> throw new IllegalArgumentException("unknown transport protocol number [" + transportNumber + "]");
return IcmpIpV6;
};
case 88:
return Eigrp;
case 89:
return Ospf;
case 103:
return Pim;
case 132:
return Sctp;
default:
throw new IllegalArgumentException("unknown transport protocol number [" + transportNumber + "]");
}
}
}

public static Transport fromObject(Object o) {
public static Transport fromObject(Object o) {

@ -557,55 +542,33 @@ public final class CommunityIdProcessor extends AbstractProcessor {
}
}

public static IcmpType fromNumber(int type) {
public static IcmpType fromNumber(int type) {
switch (type) {
return switch (type) {
case 0:
case 0 -> EchoReply;
return EchoReply;
case 8 -> EchoRequest;
case 8:
case 9 -> RouterAdvertisement;
return EchoRequest;
case 10 -> RouterSolicitation;
case 9:
case 13 -> TimestampRequest;
return RouterAdvertisement;
case 14 -> TimestampReply;
case 10:
case 15 -> InfoRequest;
return RouterSolicitation;
case 16 -> InfoReply;
case 13:
case 17 -> AddressMaskRequest;
return TimestampRequest;
case 18 -> AddressMaskReply;
case 14:
case 128 -> V6EchoRequest;
return TimestampReply;
case 129 -> V6EchoReply;
case 15:
case 133 -> V6RouterSolicitation;
return InfoRequest;
case 134 -> V6RouterAdvertisement;
case 16:
case 135 -> V6NeighborSolicitation;
return InfoReply;
case 136 -> V6NeighborAdvertisement;
case 17:
case 130 -> V6MLDv1MulticastListenerQueryMessage;
return AddressMaskRequest;
case 131 -> V6MLDv1MulticastListenerReportMessage;
case 18:
case 139 -> V6WhoAreYouRequest;
return AddressMaskReply;
case 140 -> V6WhoAreYouReply;
case 128:
case 144 -> V6HomeAddressDiscoveryRequest;
return V6EchoRequest;
case 145 -> V6HomeAddressDiscoveryResponse;
case 129:
default ->
return V6EchoReply;
case 133:
return V6RouterSolicitation;
case 134:
return V6RouterAdvertisement;
case 135:
return V6NeighborSolicitation;
case 136:
return V6NeighborAdvertisement;
case 130:
return V6MLDv1MulticastListenerQueryMessage;
case 131:
return V6MLDv1MulticastListenerReportMessage;
case 139:
return V6WhoAreYouRequest;
case 140:
return V6WhoAreYouReply;
case 144:
return V6HomeAddressDiscoveryRequest;
case 145:
return V6HomeAddressDiscoveryResponse;
default:
// don't fail if the type is unknown
// don't fail if the type is unknown
return EchoReply;
EchoReply;
}
};
}
}

public static Integer codeEquivalent(int icmpType, boolean isIpV6) {
public static Integer codeEquivalent(int icmpType, boolean isIpV6) {

@ -75,14 +75,9 @@ final class CsvParser {

// we've reached end of string, we need to handle last field
// we've reached end of string, we need to handle last field
switch (state) {
switch (state) {
case UNQUOTED:
case UNQUOTED -> setField(length);
setField(length);
case QUOTED_END -> setField(length - 1);
break;
case QUOTED -> throw new IllegalArgumentException("Unmatched quote");
case QUOTED_END:
setField(length - 1);
break;
case QUOTED:
throw new IllegalArgumentException("Unmatched quote");
}
}
}
}

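The CsvParser change above still uses a statement switch over the parser state, but the exhaustiveness benefit mentioned in the commit message is easiest to see with a switch expression over an enum: cover every constant and no default is needed, and adding a constant later becomes a compile error at every such switch. A hedged sketch with an invented State enum, not the real CsvParser one:

public class ExhaustiveEnumSketch {
    enum State { UNQUOTED, QUOTED, QUOTED_END }

    // Because this is a switch expression over an enum and all three
    // constants are listed, the compiler checks exhaustiveness; adding a
    // fourth State constant without handling it here would not compile.
    static int trailingChars(State state) {
        return switch (state) {
            case UNQUOTED -> 0;
            case QUOTED_END -> 1;
            case QUOTED -> throw new IllegalArgumentException("Unmatched quote");
        };
    }

    public static void main(String[] args) {
        System.out.println(trailingChars(State.UNQUOTED));   // 0
        System.out.println(trailingChars(State.QUOTED_END)); // 1
    }
}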
@ -118,17 +118,12 @@ enum DateFormat {
abstract Function<String, ZonedDateTime> getFunction(String format, ZoneId timezone, Locale locale);
abstract Function<String, ZonedDateTime> getFunction(String format, ZoneId timezone, Locale locale);

static DateFormat fromString(String format) {
static DateFormat fromString(String format) {
switch (format) {
return switch (format) {
case "ISO8601":
case "ISO8601" -> Iso8601;
return Iso8601;
case "UNIX" -> Unix;
case "UNIX":
case "UNIX_MS" -> UnixMs;
return Unix;
case "TAI64N" -> Tai64n;
case "UNIX_MS":
default -> Java;
return UnixMs;
};
case "TAI64N":
return Tai64n;
default:
return Java;
}
}
}
}
}

@ -165,29 +165,18 @@ public class NetworkDirectionProcessor extends AbstractProcessor {

private boolean inNetwork(String ip, String network) {
private boolean inNetwork(String ip, String network) {
InetAddress address = InetAddresses.forString(ip);
InetAddress address = InetAddresses.forString(ip);
switch (network) {
return switch (network) {
case LOOPBACK_NAMED_NETWORK:
case LOOPBACK_NAMED_NETWORK -> isLoopback(address);
return isLoopback(address);
case GLOBAL_UNICAST_NAMED_NETWORK, UNICAST_NAMED_NETWORK -> isUnicast(address);
case GLOBAL_UNICAST_NAMED_NETWORK:
case LINK_LOCAL_UNICAST_NAMED_NETWORK -> isLinkLocalUnicast(address);
case UNICAST_NAMED_NETWORK:
case INTERFACE_LOCAL_NAMED_NETWORK -> isInterfaceLocalMulticast(address);
return isUnicast(address);
case LINK_LOCAL_MULTICAST_NAMED_NETWORK -> isLinkLocalMulticast(address);
case LINK_LOCAL_UNICAST_NAMED_NETWORK:
case MULTICAST_NAMED_NETWORK -> isMulticast(address);
return isLinkLocalUnicast(address);
case UNSPECIFIED_NAMED_NETWORK -> isUnspecified(address);
case INTERFACE_LOCAL_NAMED_NETWORK:
case PRIVATE_NAMED_NETWORK -> isPrivate(ip);
return isInterfaceLocalMulticast(address);
case PUBLIC_NAMED_NETWORK -> isPublic(ip);
case LINK_LOCAL_MULTICAST_NAMED_NETWORK:
default -> CIDRUtils.isInRange(ip, network);
return isLinkLocalMulticast(address);
};
case MULTICAST_NAMED_NETWORK:
return isMulticast(address);
case UNSPECIFIED_NAMED_NETWORK:
return isUnspecified(address);
case PRIVATE_NAMED_NETWORK:
return isPrivate(ip);
case PUBLIC_NAMED_NETWORK:
return isPublic(ip);
default:
return CIDRUtils.isInRange(ip, network);
}
}
}

private boolean isLoopback(InetAddress ip) {
private boolean isLoopback(InetAddress ip) {

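Every arm in the hunk above fits in a single expression, but when a value-producing arm needs several statements, JEP 361 uses a braced block that produces its value with yield. A small sketch under that assumption (the classification logic is invented, not taken from NetworkDirectionProcessor):

public class YieldSketch {
    // A block-bodied arm in a switch expression yields its value
    // rather than using return or break.
    static String classify(String network) {
        return switch (network) {
            case "loopback" -> "local";
            default -> {
                String trimmed = network.trim().toLowerCase();
                yield trimmed.isEmpty() ? "unknown" : "cidr:" + trimmed;
            }
        };
    }

    public static void main(String[] args) {
        System.out.println(classify("loopback"));      // local
        System.out.println(classify(" 10.0.0.0/8 "));  // cidr:10.0.0.0/8
    }
}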
@ -383,23 +383,22 @@ public class ConvertProcessorTests extends ESTestCase {
|
||||||
Object fieldValue;
|
Object fieldValue;
|
||||||
String expectedFieldValue;
|
String expectedFieldValue;
|
||||||
switch (randomIntBetween(0, 2)) {
|
switch (randomIntBetween(0, 2)) {
|
||||||
case 0:
|
case 0 -> {
|
||||||
float randomFloat = randomFloat();
|
float randomFloat = randomFloat();
|
||||||
fieldValue = randomFloat;
|
fieldValue = randomFloat;
|
||||||
expectedFieldValue = Float.toString(randomFloat);
|
expectedFieldValue = Float.toString(randomFloat);
|
||||||
break;
|
}
|
||||||
case 1:
|
case 1 -> {
|
||||||
int randomInt = randomInt();
|
int randomInt = randomInt();
|
||||||
fieldValue = randomInt;
|
fieldValue = randomInt;
|
||||||
expectedFieldValue = Integer.toString(randomInt);
|
expectedFieldValue = Integer.toString(randomInt);
|
||||||
break;
|
}
|
||||||
case 2:
|
case 2 -> {
|
||||||
boolean randomBoolean = randomBoolean();
|
boolean randomBoolean = randomBoolean();
|
||||||
fieldValue = randomBoolean;
|
fieldValue = randomBoolean;
|
||||||
expectedFieldValue = Boolean.toString(randomBoolean);
|
expectedFieldValue = Boolean.toString(randomBoolean);
|
||||||
break;
|
}
|
||||||
default:
|
default -> throw new UnsupportedOperationException();
|
||||||
throw new UnsupportedOperationException();
|
|
||||||
}
|
}
|
||||||
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
|
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
|
||||||
|
|
||||||
|
@ -417,33 +416,32 @@ public class ConvertProcessorTests extends ESTestCase {
|
||||||
Object randomValue;
|
Object randomValue;
|
||||||
String randomValueString;
|
String randomValueString;
|
||||||
switch (randomIntBetween(0, 2)) {
|
switch (randomIntBetween(0, 2)) {
|
||||||
case 0:
|
case 0 -> {
|
||||||
float randomFloat = randomFloat();
|
float randomFloat = randomFloat();
|
||||||
randomValue = randomFloat;
|
randomValue = randomFloat;
|
||||||
randomValueString = Float.toString(randomFloat);
|
randomValueString = Float.toString(randomFloat);
|
||||||
break;
|
}
|
||||||
case 1:
|
case 1 -> {
|
||||||
int randomInt = randomInt();
|
int randomInt = randomInt();
|
||||||
randomValue = randomInt;
|
randomValue = randomInt;
|
||||||
randomValueString = Integer.toString(randomInt);
|
randomValueString = Integer.toString(randomInt);
|
||||||
break;
|
}
|
||||||
case 2:
|
case 2 -> {
|
||||||
boolean randomBoolean = randomBoolean();
|
boolean randomBoolean = randomBoolean();
|
||||||
randomValue = randomBoolean;
|
randomValue = randomBoolean;
|
||||||
randomValueString = Boolean.toString(randomBoolean);
|
randomValueString = Boolean.toString(randomBoolean);
|
||||||
break;
|
}
|
||||||
case 3:
|
case 3 -> {
|
||||||
long randomLong = randomLong();
|
long randomLong = randomLong();
|
||||||
randomValue = randomLong;
|
randomValue = randomLong;
|
||||||
randomValueString = Long.toString(randomLong);
|
randomValueString = Long.toString(randomLong);
|
||||||
break;
|
}
|
||||||
case 4:
|
case 4 -> {
|
||||||
double randomDouble = randomDouble();
|
double randomDouble = randomDouble();
|
||||||
randomValue = randomDouble;
|
randomValue = randomDouble;
|
||||||
randomValueString = Double.toString(randomDouble);
|
randomValueString = Double.toString(randomDouble);
|
||||||
break;
|
}
|
||||||
default:
|
default -> throw new UnsupportedOperationException();
|
||||||
throw new UnsupportedOperationException();
|
|
||||||
}
|
}
|
||||||
fieldValue.add(randomValue);
|
fieldValue.add(randomValue);
|
||||||
expectedList.add(randomValueString);
|
expectedList.add(randomValueString);
|
||||||
|
@ -501,20 +499,19 @@ public class ConvertProcessorTests extends ESTestCase {
|
||||||
public void testAutoConvertNotString() throws Exception {
|
public void testAutoConvertNotString() throws Exception {
|
||||||
Object randomValue;
|
Object randomValue;
|
||||||
switch (randomIntBetween(0, 2)) {
|
switch (randomIntBetween(0, 2)) {
|
||||||
case 0:
|
case 0 -> {
|
||||||
float randomFloat = randomFloat();
|
float randomFloat = randomFloat();
|
||||||
randomValue = randomFloat;
|
randomValue = randomFloat;
|
||||||
break;
|
}
|
||||||
case 1:
|
case 1 -> {
|
||||||
int randomInt = randomInt();
|
int randomInt = randomInt();
|
||||||
randomValue = randomInt;
|
randomValue = randomInt;
|
||||||
break;
|
}
|
||||||
case 2:
|
case 2 -> {
|
||||||
boolean randomBoolean = randomBoolean();
|
boolean randomBoolean = randomBoolean();
|
||||||
randomValue = randomBoolean;
|
randomValue = randomBoolean;
|
||||||
break;
|
}
|
||||||
default:
|
default -> throw new UnsupportedOperationException();
|
||||||
throw new UnsupportedOperationException();
|
|
||||||
}
|
}
|
||||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomValue));
|
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomValue));
|
||||||
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false);
|
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false);
|
||||||
|
|
|
@ -224,28 +224,26 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
||||||
Map<String, Object> geoData = new HashMap<>();
|
Map<String, Object> geoData = new HashMap<>();
|
||||||
for (Property property : this.properties) {
|
for (Property property : this.properties) {
|
||||||
switch (property) {
|
switch (property) {
|
||||||
case IP:
|
case IP -> geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||||
geoData.put("ip", NetworkAddress.format(ipAddress));
|
case COUNTRY_ISO_CODE -> {
|
||||||
break;
|
|
||||||
case COUNTRY_ISO_CODE:
|
|
||||||
String countryIsoCode = country.getIsoCode();
|
String countryIsoCode = country.getIsoCode();
|
||||||
if (countryIsoCode != null) {
|
if (countryIsoCode != null) {
|
||||||
geoData.put("country_iso_code", countryIsoCode);
|
geoData.put("country_iso_code", countryIsoCode);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case COUNTRY_NAME:
|
case COUNTRY_NAME -> {
|
||||||
String countryName = country.getName();
|
String countryName = country.getName();
|
||||||
if (countryName != null) {
|
if (countryName != null) {
|
||||||
geoData.put("country_name", countryName);
|
geoData.put("country_name", countryName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case CONTINENT_NAME:
|
case CONTINENT_NAME -> {
|
||||||
String continentName = continent.getName();
|
String continentName = continent.getName();
|
||||||
if (continentName != null) {
|
if (continentName != null) {
|
||||||
geoData.put("continent_name", continentName);
|
geoData.put("continent_name", continentName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case REGION_ISO_CODE:
|
case REGION_ISO_CODE -> {
|
||||||
// ISO 3166-2 code for country subdivisions.
|
// ISO 3166-2 code for country subdivisions.
|
||||||
// See iso.org/iso-3166-country-codes.html
|
// See iso.org/iso-3166-country-codes.html
|
||||||
String countryIso = country.getIsoCode();
|
String countryIso = country.getIsoCode();
|
||||||
|
@ -254,26 +252,26 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
||||||
String regionIsoCode = countryIso + "-" + subdivisionIso;
|
String regionIsoCode = countryIso + "-" + subdivisionIso;
|
||||||
geoData.put("region_iso_code", regionIsoCode);
|
geoData.put("region_iso_code", regionIsoCode);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case REGION_NAME:
|
case REGION_NAME -> {
|
||||||
String subdivisionName = subdivision.getName();
|
String subdivisionName = subdivision.getName();
|
||||||
if (subdivisionName != null) {
|
if (subdivisionName != null) {
|
||||||
geoData.put("region_name", subdivisionName);
|
geoData.put("region_name", subdivisionName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case CITY_NAME:
|
case CITY_NAME -> {
|
||||||
String cityName = city.getName();
|
String cityName = city.getName();
|
||||||
if (cityName != null) {
|
if (cityName != null) {
|
||||||
geoData.put("city_name", cityName);
|
geoData.put("city_name", cityName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case TIMEZONE:
|
case TIMEZONE -> {
|
||||||
String locationTimeZone = location.getTimeZone();
|
String locationTimeZone = location.getTimeZone();
|
||||||
if (locationTimeZone != null) {
|
if (locationTimeZone != null) {
|
||||||
geoData.put("timezone", locationTimeZone);
|
geoData.put("timezone", locationTimeZone);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case LOCATION:
|
case LOCATION -> {
|
||||||
Double latitude = location.getLatitude();
|
Double latitude = location.getLatitude();
|
||||||
Double longitude = location.getLongitude();
|
Double longitude = location.getLongitude();
|
||||||
if (latitude != null && longitude != null) {
|
if (latitude != null && longitude != null) {
|
||||||
|
@ -282,7 +280,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
||||||
locationObject.put("lon", longitude);
|
locationObject.put("lon", longitude);
|
||||||
geoData.put("location", locationObject);
|
geoData.put("location", locationObject);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return geoData;
|
return geoData;
|
||||||
|
@ -299,27 +297,25 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
||||||
Map<String, Object> geoData = new HashMap<>();
|
Map<String, Object> geoData = new HashMap<>();
|
||||||
for (Property property : this.properties) {
|
for (Property property : this.properties) {
|
||||||
switch (property) {
|
switch (property) {
|
||||||
case IP:
|
case IP -> geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||||
geoData.put("ip", NetworkAddress.format(ipAddress));
|
case COUNTRY_ISO_CODE -> {
|
||||||
break;
|
|
||||||
case COUNTRY_ISO_CODE:
|
|
||||||
String countryIsoCode = country.getIsoCode();
|
String countryIsoCode = country.getIsoCode();
|
||||||
if (countryIsoCode != null) {
|
if (countryIsoCode != null) {
|
||||||
geoData.put("country_iso_code", countryIsoCode);
|
geoData.put("country_iso_code", countryIsoCode);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case COUNTRY_NAME:
|
case COUNTRY_NAME -> {
|
||||||
String countryName = country.getName();
|
String countryName = country.getName();
|
||||||
if (countryName != null) {
|
if (countryName != null) {
|
||||||
geoData.put("country_name", countryName);
|
geoData.put("country_name", countryName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
case CONTINENT_NAME:
|
case CONTINENT_NAME -> {
|
||||||
String continentName = continent.getName();
|
String continentName = continent.getName();
|
||||||
if (continentName != null) {
|
if (continentName != null) {
|
||||||
geoData.put("continent_name", continentName);
|
geoData.put("continent_name", continentName);
|
||||||
}
|
}
|
||||||
break;
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return geoData;
|
return geoData;
|
||||||
|
|
|
@ -45,52 +45,36 @@ final class DateField {
|
||||||
static final String GET_SECONDS_METHOD = "getSeconds";
|
static final String GET_SECONDS_METHOD = "getSeconds";
|
||||||
|
|
||||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||||
switch (variable) {
|
return switch (variable) {
|
||||||
case VALUE_VARIABLE:
|
case VALUE_VARIABLE -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case EMPTY_VARIABLE -> new EmptyMemberValueSource(fieldData);
|
||||||
case EMPTY_VARIABLE:
|
case LENGTH_VARIABLE -> new CountMethodValueSource(fieldData);
|
||||||
return new EmptyMemberValueSource(fieldData);
|
default -> throw new IllegalArgumentException(
|
||||||
case LENGTH_VARIABLE:
|
"Member variable [" + variable + "] does not exist for date field [" + fieldName + "]."
|
||||||
return new CountMethodValueSource(fieldData);
|
);
|
||||||
default:
|
};
|
||||||
throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "].");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||||
switch (method) {
|
return switch (method) {
|
||||||
case GETVALUE_METHOD:
|
case GETVALUE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case ISEMPTY_METHOD -> new EmptyMemberValueSource(fieldData);
|
||||||
case ISEMPTY_METHOD:
|
case SIZE_METHOD -> new CountMethodValueSource(fieldData);
|
||||||
return new EmptyMemberValueSource(fieldData);
|
case MINIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
case SIZE_METHOD:
|
case MAXIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||||
return new CountMethodValueSource(fieldData);
|
case AVERAGE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||||
case MINIMUM_METHOD:
|
case MEDIAN_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case SUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||||
case MAXIMUM_METHOD:
|
case COUNT_METHOD -> new CountMethodValueSource(fieldData);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
case GET_YEAR_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR);
|
||||||
case AVERAGE_METHOD:
|
case GET_MONTH_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
case GET_DAY_OF_MONTH_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH);
|
||||||
case MEDIAN_METHOD:
|
case GET_HOUR_OF_DAY_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
case GET_MINUTES_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE);
|
||||||
case SUM_METHOD:
|
case GET_SECONDS_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
default -> throw new IllegalArgumentException(
|
||||||
case COUNT_METHOD:
|
"Member method [" + method + "] does not exist for date field [" + fieldName + "]."
|
||||||
return new CountMethodValueSource(fieldData);
|
);
|
||||||
case GET_YEAR_METHOD:
|
};
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR);
|
|
||||||
case GET_MONTH_METHOD:
|
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH);
|
|
||||||
case GET_DAY_OF_MONTH_METHOD:
|
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH);
|
|
||||||
case GET_HOUR_OF_DAY_METHOD:
|
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY);
|
|
||||||
case GET_MINUTES_METHOD:
|
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE);
|
|
||||||
case GET_SECONDS_METHOD:
|
|
||||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND);
|
|
||||||
default:
|
|
||||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for date field [" + fieldName + "].");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -64,112 +64,154 @@ final class DateObject {
|
||||||
static final String GETYEAR_OF_ERA_METHOD = "getYearOfEra";
|
static final String GETYEAR_OF_ERA_METHOD = "getYearOfEra";
|
||||||
|
|
||||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||||
switch (variable) {
|
return switch (variable) {
|
||||||
case CENTURY_OF_ERA_VARIABLE:
|
case CENTURY_OF_ERA_VARIABLE -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100);
|
fieldData,
|
||||||
case DAY_OF_MONTH_VARIABLE:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfMonth);
|
variable,
|
||||||
case DAY_OF_WEEK_VARIABLE:
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.getDayOfWeek().getValue());
|
);
|
||||||
case DAY_OF_YEAR_VARIABLE:
|
case DAY_OF_MONTH_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfMonth);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfYear);
|
case DAY_OF_WEEK_VARIABLE -> new DateObjectValueSource(
|
||||||
case ERA_VARIABLE:
|
fieldData,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.ERA));
|
MultiValueMode.MIN,
|
||||||
case HOUR_OF_DAY_VARIABLE:
|
variable,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getHour);
|
zdt -> zdt.getDayOfWeek().getValue()
|
||||||
case MILLIS_OF_DAY_VARIABLE:
|
);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MILLI_OF_DAY));
|
case DAY_OF_YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfYear);
|
||||||
case MILLIS_OF_SECOND_VARIABLE:
|
case ERA_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.ERA));
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MILLI_OF_SECOND));
|
case HOUR_OF_DAY_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getHour);
|
||||||
case MINUTE_OF_DAY_VARIABLE:
|
case MILLIS_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MINUTE_OF_DAY));
|
fieldData,
|
||||||
case MINUTE_OF_HOUR_VARIABLE:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMinute);
|
variable,
|
||||||
case MONTH_OF_YEAR_VARIABLE:
|
zdt -> zdt.get(ChronoField.MILLI_OF_DAY)
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMonthValue);
|
);
|
||||||
case SECOND_OF_DAY_VARIABLE:
|
case MILLIS_OF_SECOND_VARIABLE -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.SECOND_OF_DAY));
|
fieldData,
|
||||||
case SECOND_OF_MINUTE_VARIABLE:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getSecond);
|
variable,
|
||||||
case WEEK_OF_WEEK_YEAR_VARIABLE:
|
zdt -> zdt.get(ChronoField.MILLI_OF_SECOND)
|
||||||
return new DateObjectValueSource(
|
);
|
||||||
|
case MINUTE_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
variable,
|
||||||
|
zdt -> zdt.get(ChronoField.MINUTE_OF_DAY)
|
||||||
|
);
|
||||||
|
case MINUTE_OF_HOUR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMinute);
|
||||||
|
case MONTH_OF_YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMonthValue);
|
||||||
|
case SECOND_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
variable,
|
||||||
|
zdt -> zdt.get(ChronoField.SECOND_OF_DAY)
|
||||||
|
);
|
||||||
|
case SECOND_OF_MINUTE_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getSecond);
|
||||||
|
case WEEK_OF_WEEK_YEAR_VARIABLE -> new DateObjectValueSource(
|
||||||
fieldData,
|
fieldData,
|
||||||
MultiValueMode.MIN,
|
MultiValueMode.MIN,
|
||||||
variable,
|
variable,
|
||||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||||
);
|
);
|
||||||
case WEEK_YEAR_VARIABLE:
|
case WEEK_YEAR_VARIABLE -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(
|
|
||||||
fieldData,
|
fieldData,
|
||||||
MultiValueMode.MIN,
|
MultiValueMode.MIN,
|
||||||
variable,
|
variable,
|
||||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||||
);
|
);
|
||||||
case YEAR_VARIABLE:
|
case YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getYear);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getYear);
|
case YEAR_OF_CENTURY_VARIABLE -> new DateObjectValueSource(
|
||||||
case YEAR_OF_CENTURY_VARIABLE:
|
fieldData,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100);
|
MultiValueMode.MIN,
|
||||||
case YEAR_OF_ERA_VARIABLE:
|
variable,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA));
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100
|
||||||
default:
|
);
|
||||||
throw new IllegalArgumentException(
|
case YEAR_OF_ERA_VARIABLE -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
variable,
|
||||||
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA)
|
||||||
|
);
|
||||||
|
default -> throw new IllegalArgumentException(
|
||||||
"Member variable [" + variable + "] does not exist for date object on field [" + fieldName + "]."
|
"Member variable [" + variable + "] does not exist for date object on field [" + fieldName + "]."
|
||||||
);
|
);
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||||
switch (method) {
|
return switch (method) {
|
||||||
case GETCENTURY_OF_ERA_METHOD:
|
case GETCENTURY_OF_ERA_METHOD -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100);
|
fieldData,
|
||||||
case GETDAY_OF_MONTH_METHOD:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfMonth);
|
method,
|
||||||
case GETDAY_OF_WEEK_METHOD:
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.getDayOfWeek().getValue());
|
);
|
||||||
case GETDAY_OF_YEAR_METHOD:
|
case GETDAY_OF_MONTH_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfMonth);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfYear);
|
case GETDAY_OF_WEEK_METHOD -> new DateObjectValueSource(
|
||||||
case GETERA_METHOD:
|
fieldData,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.ERA));
|
MultiValueMode.MIN,
|
||||||
case GETHOUR_OF_DAY_METHOD:
|
method,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getHour);
|
zdt -> zdt.getDayOfWeek().getValue()
|
||||||
case GETMILLIS_OF_DAY_METHOD:
|
);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MILLI_OF_DAY));
|
case GETDAY_OF_YEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfYear);
|
||||||
case GETMILLIS_OF_SECOND_METHOD:
|
case GETERA_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.ERA));
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MILLI_OF_SECOND));
|
case GETHOUR_OF_DAY_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getHour);
|
||||||
case GETMINUTE_OF_DAY_METHOD:
|
case GETMILLIS_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MINUTE_OF_DAY));
|
fieldData,
|
||||||
case GETMINUTE_OF_HOUR_METHOD:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMinute);
|
method,
|
||||||
case GETMONTH_OF_YEAR_METHOD:
|
zdt -> zdt.get(ChronoField.MILLI_OF_DAY)
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMonthValue);
|
);
|
||||||
case GETSECOND_OF_DAY_METHOD:
|
case GETMILLIS_OF_SECOND_METHOD -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.SECOND_OF_DAY));
|
fieldData,
|
||||||
case GETSECOND_OF_MINUTE_METHOD:
|
MultiValueMode.MIN,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getSecond);
|
method,
|
||||||
case GETWEEK_OF_WEEK_YEAR_METHOD:
|
zdt -> zdt.get(ChronoField.MILLI_OF_SECOND)
|
||||||
return new DateObjectValueSource(
|
);
|
||||||
|
case GETMINUTE_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
method,
|
||||||
|
zdt -> zdt.get(ChronoField.MINUTE_OF_DAY)
|
||||||
|
);
|
||||||
|
case GETMINUTE_OF_HOUR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMinute);
|
||||||
|
case GETMONTH_OF_YEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMonthValue);
|
||||||
|
case GETSECOND_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
method,
|
||||||
|
zdt -> zdt.get(ChronoField.SECOND_OF_DAY)
|
||||||
|
);
|
||||||
|
case GETSECOND_OF_MINUTE_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getSecond);
|
||||||
|
case GETWEEK_OF_WEEK_YEAR_METHOD -> new DateObjectValueSource(
|
||||||
fieldData,
|
fieldData,
|
||||||
MultiValueMode.MIN,
|
MultiValueMode.MIN,
|
||||||
method,
|
method,
|
||||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||||
);
|
);
|
||||||
case GETWEEK_YEAR_METHOD:
|
case GETWEEK_YEAR_METHOD -> new DateObjectValueSource(
|
||||||
return new DateObjectValueSource(
|
|
||||||
fieldData,
|
fieldData,
|
||||||
MultiValueMode.MIN,
|
MultiValueMode.MIN,
|
||||||
method,
|
method,
|
||||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||||
);
|
);
|
||||||
case GETYEAR_METHOD:
|
case GETYEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getYear);
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getYear);
|
case GETYEAR_OF_CENTURY_METHOD -> new DateObjectValueSource(
|
||||||
case GETYEAR_OF_CENTURY_METHOD:
|
fieldData,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100);
|
MultiValueMode.MIN,
|
||||||
case GETYEAR_OF_ERA_METHOD:
|
method,
|
||||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA));
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100
|
||||||
default:
|
);
|
||||||
throw new IllegalArgumentException(
|
case GETYEAR_OF_ERA_METHOD -> new DateObjectValueSource(
|
||||||
|
fieldData,
|
||||||
|
MultiValueMode.MIN,
|
||||||
|
method,
|
||||||
|
zdt -> zdt.get(ChronoField.YEAR_OF_ERA)
|
||||||
|
);
|
||||||
|
default -> throw new IllegalArgumentException(
|
||||||
"Member method [" + method + "] does not exist for date object on field [" + fieldName + "]."
|
"Member method [" + method + "] does not exist for date object on field [" + fieldName + "]."
|
||||||
);
|
);
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,28 +29,24 @@ final class GeoField {
|
||||||
static final String GETLON_METHOD = "getLon";
|
static final String GETLON_METHOD = "getLon";
|
||||||
|
|
||||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||||
switch (variable) {
|
return switch (variable) {
|
||||||
case EMPTY_VARIABLE:
|
case EMPTY_VARIABLE -> new GeoEmptyValueSource(fieldData);
|
||||||
return new GeoEmptyValueSource(fieldData);
|
case LAT_VARIABLE -> new GeoLatitudeValueSource(fieldData);
|
||||||
case LAT_VARIABLE:
|
case LON_VARIABLE -> new GeoLongitudeValueSource(fieldData);
|
||||||
return new GeoLatitudeValueSource(fieldData);
|
default -> throw new IllegalArgumentException(
|
||||||
case LON_VARIABLE:
|
"Member variable [" + variable + "] does not exist for geo field [" + fieldName + "]."
|
||||||
return new GeoLongitudeValueSource(fieldData);
|
);
|
||||||
default:
|
};
|
||||||
throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for geo field [" + fieldName + "].");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||||
switch (method) {
|
return switch (method) {
|
||||||
case ISEMPTY_METHOD:
|
case ISEMPTY_METHOD -> new GeoEmptyValueSource(fieldData);
|
||||||
return new GeoEmptyValueSource(fieldData);
|
case GETLAT_METHOD -> new GeoLatitudeValueSource(fieldData);
|
||||||
case GETLAT_METHOD:
|
case GETLON_METHOD -> new GeoLongitudeValueSource(fieldData);
|
||||||
return new GeoLatitudeValueSource(fieldData);
|
default -> throw new IllegalArgumentException(
|
||||||
case GETLON_METHOD:
|
"Member method [" + method + "] does not exist for geo field [" + fieldName + "]."
|
||||||
return new GeoLongitudeValueSource(fieldData);
|
);
|
||||||
default:
|
};
|
||||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for geo field [" + fieldName + "].");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -36,42 +36,30 @@ final class NumericField {
|
||||||
static final String COUNT_METHOD = "count";
|
static final String COUNT_METHOD = "count";
|
||||||
|
|
||||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||||
switch (variable) {
|
return switch (variable) {
|
||||||
case VALUE_VARIABLE:
|
case VALUE_VARIABLE -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case EMPTY_VARIABLE -> new EmptyMemberValueSource(fieldData);
|
||||||
case EMPTY_VARIABLE:
|
case LENGTH_VARIABLE -> new CountMethodValueSource(fieldData);
|
||||||
return new EmptyMemberValueSource(fieldData);
|
default -> throw new IllegalArgumentException(
|
||||||
case LENGTH_VARIABLE:
|
|
||||||
return new CountMethodValueSource(fieldData);
|
|
||||||
default:
|
|
||||||
throw new IllegalArgumentException(
|
|
||||||
"Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."
|
"Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."
|
||||||
);
|
);
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||||
switch (method) {
|
return switch (method) {
|
||||||
case GETVALUE_METHOD:
|
case GETVALUE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case ISEMPTY_METHOD -> new EmptyMemberValueSource(fieldData);
|
||||||
case ISEMPTY_METHOD:
|
case SIZE_METHOD -> new CountMethodValueSource(fieldData);
|
||||||
return new EmptyMemberValueSource(fieldData);
|
case MINIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||||
case SIZE_METHOD:
|
case MAXIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||||
return new CountMethodValueSource(fieldData);
|
case AVERAGE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||||
case MINIMUM_METHOD:
|
case MEDIAN_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
case SUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||||
case MAXIMUM_METHOD:
|
case COUNT_METHOD -> new CountMethodValueSource(fieldData);
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
default -> throw new IllegalArgumentException(
|
||||||
case AVERAGE_METHOD:
|
"Member method [" + method + "] does not exist for numeric field [" + fieldName + "]."
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
);
|
||||||
case MEDIAN_METHOD:
|
};
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
|
||||||
case SUM_METHOD:
|
|
||||||
return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
|
||||||
case COUNT_METHOD:
|
|
||||||
return new CountMethodValueSource(fieldData);
|
|
||||||
default:
|
|
||||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for numeric field [" + fieldName + "].");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -156,21 +156,23 @@ public final class DefBootstrap {
|
||||||
* Does a slow lookup against the whitelist.
|
* Does a slow lookup against the whitelist.
|
||||||
*/
|
*/
|
||||||
private MethodHandle lookup(int flavorValue, String nameValue, Class<?> receiver) throws Throwable {
|
private MethodHandle lookup(int flavorValue, String nameValue, Class<?> receiver) throws Throwable {
|
||||||
switch (flavorValue) {
|
return switch (flavorValue) {
|
||||||
case METHOD_CALL:
|
case METHOD_CALL -> Def.lookupMethod(
|
||||||
return Def.lookupMethod(painlessLookup, functions, constants, methodHandlesLookup, type(), receiver, nameValue, args);
|
painlessLookup,
|
||||||
case LOAD:
|
functions,
|
||||||
return Def.lookupGetter(painlessLookup, receiver, nameValue);
|
constants,
|
||||||
case STORE:
|
methodHandlesLookup,
|
||||||
return Def.lookupSetter(painlessLookup, receiver, nameValue);
|
type(),
|
||||||
case ARRAY_LOAD:
|
receiver,
|
||||||
return Def.lookupArrayLoad(receiver);
|
nameValue,
|
||||||
case ARRAY_STORE:
|
args
|
||||||
return Def.lookupArrayStore(receiver);
|
);
|
||||||
case ITERATOR:
|
case LOAD -> Def.lookupGetter(painlessLookup, receiver, nameValue);
|
||||||
return Def.lookupIterator(receiver);
|
case STORE -> Def.lookupSetter(painlessLookup, receiver, nameValue);
|
||||||
case REFERENCE:
|
case ARRAY_LOAD -> Def.lookupArrayLoad(receiver);
|
||||||
return Def.lookupReference(
|
case ARRAY_STORE -> Def.lookupArrayStore(receiver);
|
||||||
|
case ITERATOR -> Def.lookupIterator(receiver);
|
||||||
|
case REFERENCE -> Def.lookupReference(
|
||||||
painlessLookup,
|
painlessLookup,
|
||||||
functions,
|
functions,
|
||||||
constants,
|
constants,
|
||||||
|
@ -179,11 +181,9 @@ public final class DefBootstrap {
|
||||||
receiver,
|
receiver,
|
||||||
nameValue
|
nameValue
|
||||||
);
|
);
|
||||||
case INDEX_NORMALIZE:
|
case INDEX_NORMALIZE -> Def.lookupIndexNormalize(receiver);
|
||||||
return Def.lookupIndexNormalize(receiver);
|
default -> throw new AssertionError();
|
||||||
default:
|
};
|
||||||
throw new AssertionError();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -493,7 +493,7 @@ public final class DefBootstrap {
|
||||||
// validate arguments
|
// validate arguments
|
||||||
switch (flavor) {
|
switch (flavor) {
|
||||||
// "function-call" like things get a polymorphic cache
|
// "function-call" like things get a polymorphic cache
|
||||||
case METHOD_CALL:
|
case METHOD_CALL -> {
|
||||||
if (args.length == 0) {
|
if (args.length == 0) {
|
||||||
throw new BootstrapMethodError("Invalid number of parameters for method call");
|
throw new BootstrapMethodError("Invalid number of parameters for method call");
|
||||||
}
|
}
|
||||||
|
@ -509,17 +509,14 @@ public final class DefBootstrap {
                    throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references");
                }
                return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
            }
            case LOAD, STORE, ARRAY_LOAD, ARRAY_STORE, ITERATOR, INDEX_NORMALIZE -> {
                if (args.length > 0) {
                    throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor);
                }
                return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
            }
            case REFERENCE -> {
                if (args.length != 1) {
                    throw new BootstrapMethodError("Invalid number of parameters for reference call");
                }
@ -527,11 +524,10 @@ public final class DefBootstrap {
                    throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]);
                }
                return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
            }

            // operators get monomorphic cache, with a generic impl for a fallback
            case UNARY_OPERATOR, SHIFT_OPERATOR, BINARY_OPERATOR -> {
                if (args.length != 1) {
                    throw new BootstrapMethodError("Invalid number of parameters for operator call");
                }
@ -548,8 +544,8 @@ public final class DefBootstrap {
                    throw new BootstrapMethodError("This parameter is only supported for BINARY/SHIFT_OPERATORs");
                }
                return new MIC(name, type, initialDepth, flavor, flags);
            }
            default -> throw new BootstrapMethodError("Illegal static bootstrap parameter for flavor: " + flavor);
        }
    }
}
@ -331,16 +331,10 @@ public final class MethodWriter extends GeneratorAdapter {
        Type methodType = Type.getMethodType(getType(returnType), getType(lhs), getType(rhs));

        switch (operation) {
            case MUL -> invokeDefCall("mul", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case DIV -> invokeDefCall("div", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case REM -> invokeDefCall("rem", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case ADD -> {
                // if either side is primitive, then the + operator should always throw NPE on null,
                // so we don't need a special NPE guard.
                // otherwise, we need to allow nulls for possible string concatenation.
@ -349,30 +343,15 @@ public final class MethodWriter extends GeneratorAdapter {
                    flags |= DefBootstrap.OPERATOR_ALLOWS_NULL;
                }
                invokeDefCall("add", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            }
            case SUB -> invokeDefCall("sub", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case LSH -> invokeDefCall("lsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
            case USH -> invokeDefCall("ush", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
            case RSH -> invokeDefCall("rsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
            case BWAND -> invokeDefCall("and", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case XOR -> invokeDefCall("xor", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            case BWOR -> invokeDefCall("or", methodType, DefBootstrap.BINARY_OPERATOR, flags);
            default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
        }
    }
@ -389,41 +368,18 @@ public final class MethodWriter extends GeneratorAdapter {
        }

        switch (operation) {
            case MUL -> math(GeneratorAdapter.MUL, getType(clazz));
            case DIV -> math(GeneratorAdapter.DIV, getType(clazz));
            case REM -> math(GeneratorAdapter.REM, getType(clazz));
            case ADD -> math(GeneratorAdapter.ADD, getType(clazz));
            case SUB -> math(GeneratorAdapter.SUB, getType(clazz));
            case LSH -> math(GeneratorAdapter.SHL, getType(clazz));
            case USH -> math(GeneratorAdapter.USHR, getType(clazz));
            case RSH -> math(GeneratorAdapter.SHR, getType(clazz));
            case BWAND -> math(GeneratorAdapter.AND, getType(clazz));
            case XOR -> math(GeneratorAdapter.XOR, getType(clazz));
            case BWOR -> math(GeneratorAdapter.OR, getType(clazz));
            default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
        }
    }
@ -27,20 +27,14 @@ public class Json {
            json
        );

        return switch (parser.nextToken()) {
            case START_ARRAY -> parser.list();
            case START_OBJECT -> parser.map();
            case VALUE_NUMBER -> parser.numberValue();
            case VALUE_BOOLEAN -> parser.booleanValue();
            case VALUE_STRING -> parser.text();
            default -> null;
        };
    }

    /**
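The Json hunk above is the simplest shape this commit applies everywhere: a chain of `case ...: return ...;` branches becomes a single `return switch` expression with arrow labels and no fall-through. The following is an illustrative, self-contained sketch of that shape, not code from the Elasticsearch tree; the `Token` enum and its constants are made up for the example.

    public class SwitchReturnExample {
        enum Token { START_ARRAY, START_OBJECT, VALUE_NUMBER, OTHER }

        // The whole method body is one expression; every arrow case yields a value.
        static String describe(Token token) {
            return switch (token) {
                case START_ARRAY -> "list";
                case START_OBJECT -> "map";
                case VALUE_NUMBER -> "number";
                default -> "unknown";
            };
        }

        public static void main(String[] args) {
            System.out.println(describe(Token.START_OBJECT)); // prints "map"
        }
    }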
@ -2282,34 +2282,17 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor<SemanticSc
        for (int i = 0; i < flags.length(); ++i) {
            char flag = flags.charAt(i);

            regexFlags |= switch (flag) {
                case 'c' -> Pattern.CANON_EQ;
                case 'i' -> Pattern.CASE_INSENSITIVE;
                case 'l' -> Pattern.LITERAL;
                case 'm' -> Pattern.MULTILINE;
                case 's' -> Pattern.DOTALL;
                case 'U' -> Pattern.UNICODE_CHARACTER_CLASS;
                case 'u' -> Pattern.UNICODE_CASE;
                case 'x' -> Pattern.COMMENTS;
                default -> throw new IllegalArgumentException("invalid regular expression: unknown flag [" + flag + "]");
            };
        }

        Pattern compiled;
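Because a switch expression produces a value, the per-character `regexFlags |= ...; break;` statements in the old Painless code collapse into one compound assignment whose right-hand side is the expression itself, as in the hunk above. Below is a rough standalone sketch of the same accumulation pattern, using invented flag letters and bit constants rather than the `Pattern` flags used above.

    public class FlagMaskExample {
        static final int FLAG_A = 1, FLAG_B = 2, FLAG_C = 4;

        static int parseFlags(String flags) {
            int mask = 0;
            for (int i = 0; i < flags.length(); i++) {
                char flag = flags.charAt(i);
                // The switch expression yields one bit per character; '|=' accumulates it.
                mask |= switch (flag) {
                    case 'a' -> FLAG_A;
                    case 'b' -> FLAG_B;
                    case 'c' -> FLAG_C;
                    default -> throw new IllegalArgumentException("unknown flag [" + flag + "]");
                };
            }
            return mask;
        }

        public static void main(String[] args) {
            System.out.println(parseFlags("ac")); // prints 5
        }
    }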
@ -31,22 +31,15 @@ public class ListTests extends ArrayLikeObjectTestCase {
    }

    private String fillValue(String valueType) {
        return switch (valueType) {
            case "int" -> "0";
            case "long" -> "0L";
            case "short" -> "(short) 0";
            case "byte" -> "(byte) 0";
            case "float" -> "0.0f";
            case "double" -> "0.0"; // Double is implicit for decimal constants
            default -> null;
        };
    }

    @Override
@ -22,20 +22,12 @@ public class PainlessExecuteResponseTests extends AbstractSerializingTestCase<Pa
    @Override
    protected PainlessExecuteAction.Response createTestInstance() {
        Object result = switch (randomIntBetween(0, 2)) {
            case 0 -> randomAlphaOfLength(10);
            case 1 -> randomBoolean();
            case 2 -> randomDoubleBetween(-10, 10, true);
            default -> throw new IllegalStateException("invalid branch");
        };
        return new PainlessExecuteAction.Response(result);
    }

@ -44,20 +36,12 @@ public class PainlessExecuteResponseTests extends AbstractSerializingTestCase<Pa
        parser.nextToken(); // START-OBJECT
        parser.nextToken(); // FIELD-NAME
        XContentParser.Token token = parser.nextToken(); // result value
        Object result = switch (token) {
            case VALUE_STRING -> parser.text();
            case VALUE_BOOLEAN -> parser.booleanValue();
            case VALUE_NUMBER -> parser.doubleValue();
            default -> throw new IOException("invalid response");
        };
        return new PainlessExecuteAction.Response(result);
    }
}
@ -112,15 +112,11 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
    public static final double DISTANCE_ERROR_PCT = 0.025d;

    public static int defaultTreeLevel(String tree) {
        return switch (tree) {
            case PrefixTrees.GEOHASH -> GEOHASH_TREE_LEVELS;
            case PrefixTrees.LEGACY_QUADTREE, PrefixTrees.QUADTREE -> QUADTREE_LEVELS;
            default -> throw new IllegalArgumentException("Unknown prefix type [" + tree + "]");
        };
    }
}
@ -102,26 +102,17 @@ public class GeoWKTParser {
                throw new ElasticsearchParseException("Expected geometry type [{}] but found [{}]", shapeType, type);
            }
        }
        return switch (type) {
            case POINT -> parsePoint(stream, ignoreZValue, coerce);
            case MULTIPOINT -> parseMultiPoint(stream, ignoreZValue, coerce);
            case LINESTRING -> parseLine(stream, ignoreZValue, coerce);
            case MULTILINESTRING -> parseMultiLine(stream, ignoreZValue, coerce);
            case POLYGON -> parsePolygon(stream, ignoreZValue, coerce);
            case MULTIPOLYGON -> parseMultiPolygon(stream, ignoreZValue, coerce);
            case ENVELOPE -> parseBBox(stream);
            case GEOMETRYCOLLECTION -> parseGeometryCollection(stream, ignoreZValue, coerce);
            default -> throw new IllegalArgumentException("Unknown geometry type: " + type);
        };
    }

    private static EnvelopeBuilder parseBBox(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
@ -317,17 +308,13 @@ public class GeoWKTParser {
    }

    private static String tokenString(StreamTokenizer stream) {
        return switch (stream.ttype) {
            case StreamTokenizer.TT_WORD -> stream.sval;
            case StreamTokenizer.TT_EOF -> EOF;
            case StreamTokenizer.TT_EOL -> EOL;
            case StreamTokenizer.TT_NUMBER -> NUMBER;
            default -> "'" + (char) stream.ttype + "'";
        };
    }

    private static boolean isNumberNext(StreamTokenizer stream) throws IOException {
@ -97,18 +97,12 @@ public class LegacyGeoShapeQueryProcessor {
    }

    public static SpatialArgs getArgs(Geometry shape, ShapeRelation relation) {
        return switch (relation) {
            case DISJOINT -> new SpatialArgs(SpatialOperation.IsDisjointTo, buildS4J(shape));
            case INTERSECTS -> new SpatialArgs(SpatialOperation.Intersects, buildS4J(shape));
            case WITHIN -> new SpatialArgs(SpatialOperation.IsWithin, buildS4J(shape));
            case CONTAINS -> new SpatialArgs(SpatialOperation.Contains, buildS4J(shape));
        };
    }

    /**
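Note that the rewritten `getArgs` above has no `default` arm: a switch expression over an enum is accepted as exhaustive once every constant is handled, so the old `default: throw new IllegalArgumentException(...)` branch could simply be dropped, and adding a new `ShapeRelation` constant later would surface as a compile error rather than a runtime failure. A minimal sketch of that property with a stand-in enum, not the Elasticsearch `ShapeRelation` type:

    public class ExhaustiveEnumSwitch {
        enum Relation { DISJOINT, INTERSECTS, WITHIN, CONTAINS }

        // No default needed: all four constants are handled, and the compiler checks it.
        // Adding a fifth constant to Relation would make this method fail to compile.
        static String describe(Relation relation) {
            return switch (relation) {
                case DISJOINT -> "no overlap";
                case INTERSECTS -> "any overlap";
                case WITHIN -> "fully inside";
                case CONTAINS -> "fully encloses";
            };
        }

        public static void main(String[] args) {
            System.out.println(describe(Relation.WITHIN));
        }
    }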
@ -35,35 +35,27 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase<EnvelopeB
    }

    static EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException {
        copyShape(original);
        // move one corner to the middle of original
        return switch (randomIntBetween(0, 3)) {
            case 0 -> new EnvelopeBuilder(
                new Coordinate(randomDoubleBetween(-180.0, original.bottomRight().x, true), original.topLeft().y),
                original.bottomRight()
            );
            case 1 -> new EnvelopeBuilder(
                new Coordinate(original.topLeft().x, randomDoubleBetween(original.bottomRight().y, 90.0, true)),
                original.bottomRight()
            );
            case 2 -> new EnvelopeBuilder(
                original.topLeft(),
                new Coordinate(randomDoubleBetween(original.topLeft().x, 180.0, true), original.bottomRight().y)
            );
            case 3 -> new EnvelopeBuilder(
                original.topLeft(),
                new Coordinate(original.bottomRight().x, randomDoubleBetween(-90.0, original.topLeft().y, true))
            );
            default -> copyShape(original);
        };
    }

    static EnvelopeBuilder createRandomShape() {
@ -20,30 +20,14 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
        int shapes = randomIntBetween(0, 8);
        for (int i = 0; i < shapes; i++) {
            switch (randomIntBetween(0, 7)) {
                case 0 -> geometryCollection.shape(PointBuilderTests.createRandomShape());
                case 1 -> geometryCollection.shape(CircleBuilderTests.createRandomShape());
                case 2 -> geometryCollection.shape(EnvelopeBuilderTests.createRandomShape());
                case 3 -> geometryCollection.shape(LineStringBuilderTests.createRandomShape());
                case 4 -> geometryCollection.shape(MultiLineStringBuilderTests.createRandomShape());
                case 5 -> geometryCollection.shape(MultiPolygonBuilderTests.createRandomShape());
                case 6 -> geometryCollection.shape(MultiPointBuilderTests.createRandomShape());
                case 7 -> geometryCollection.shape(PolygonBuilderTests.createRandomShape());
            }
        }
        return geometryCollection;
@ -59,35 +43,19 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
        if (mutation.shapes.size() > 0) {
            int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1);
            ShapeBuilder<?, ?, ?> shapeToChange = mutation.shapes.get(shapePosition);
            mutation.shapes.set(shapePosition, switch (shapeToChange.type()) {
                case POINT -> PointBuilderTests.mutate((PointBuilder) shapeToChange);
                case CIRCLE -> CircleBuilderTests.mutate((CircleBuilder) shapeToChange);
                case ENVELOPE -> EnvelopeBuilderTests.mutate((EnvelopeBuilder) shapeToChange);
                case LINESTRING -> LineStringBuilderTests.mutate((LineStringBuilder) shapeToChange);
                case MULTILINESTRING -> MultiLineStringBuilderTests.mutate((MultiLineStringBuilder) shapeToChange);
                case MULTIPOLYGON -> MultiPolygonBuilderTests.mutate((MultiPolygonBuilder) shapeToChange);
                case MULTIPOINT -> MultiPointBuilderTests.mutate((MultiPointBuilder) shapeToChange);
                case POLYGON -> PolygonBuilderTests.mutate((PolygonBuilder) shapeToChange);
                case GEOMETRYCOLLECTION -> throw new UnsupportedOperationException(
                    "GeometryCollection should not be nested inside each other"
                );
            });
        } else {
            mutation.shape(RandomShapeGenerator.createShape(random()));
        }
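The second GeometryCollectionBuilderTests hunk above also shows that a switch expression can be passed straight into a method call (`mutation.shapes.set(shapePosition, switch (...) { ... })`), and that an individual case may `throw` instead of yielding a value. Here is a toy sketch of the same shape using a plain `List<String>` rather than the shape-builder test classes:

    import java.util.ArrayList;
    import java.util.List;

    public class SwitchAsArgumentExample {
        public static void main(String[] args) {
            List<String> values = new ArrayList<>(List.of("a", "b", "c"));
            int position = 1;
            int choice = 2; // pretend this came from a random generator

            // The switch expression's result is the second argument to List.set;
            // the default case throws instead of yielding a value.
            values.set(position, switch (choice) {
                case 0 -> "zero";
                case 1 -> "one";
                case 2 -> "two";
                default -> throw new IllegalArgumentException("unexpected choice [" + choice + "]");
            });

            System.out.println(values); // prints [a, two, c]
        }
    }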
@ -290,18 +290,13 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder<RankFeat
    private static ScoreFunction readScoreFunction(StreamInput in) throws IOException {
        byte b = in.readByte();
        return switch (b) {
            case 0 -> new ScoreFunction.Log(in);
            case 1 -> new ScoreFunction.Saturation(in);
            case 2 -> new ScoreFunction.Sigmoid(in);
            case 3 -> new ScoreFunction.Linear(in);
            default -> throw new IOException("Illegal score function id: " + b);
        };
    }

    public static final ConstructingObjectParser<RankFeatureQueryBuilder, Void> PARSER = new ConstructingObjectParser<>("feature", args -> {
@ -341,17 +341,12 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
         * range of valid values.
         */
        double v = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true);
        return switch (between(0, 3)) {
            case 0 -> v;
            case 1 -> (float) v;
            case 2 -> Double.toString(v);
            case 3 -> Float.toString((float) v);
            default -> throw new IllegalArgumentException();
        };
    }
}
@ -921,31 +921,29 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
        final int randomFrom = randomInt();
        final byte[] encodedFrom;
        switch (encodingType) {
            case 0 -> {
                encodedFrom = new byte[Integer.BYTES];
                IntPoint.encodeDimension(randomFrom, encodedFrom, 0);
            }
            case 1 -> {
                encodedFrom = new byte[Long.BYTES];
                LongPoint.encodeDimension(randomFrom, encodedFrom, 0);
            }
            default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
        }

        final int randomTo = randomIntBetween(randomFrom, Integer.MAX_VALUE);
        final byte[] encodedTo;
        switch (encodingType) {
            case 0 -> {
                encodedTo = new byte[Integer.BYTES];
                IntPoint.encodeDimension(randomTo, encodedTo, 0);
            }
            case 1 -> {
                encodedTo = new byte[Long.BYTES];
                LongPoint.encodeDimension(randomTo, encodedTo, 0);
            }
            default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
        }

        String randomFieldName = randomAlphaOfLength(5);
@ -957,20 +955,19 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
        MurmurHash3.hash128(fieldAsBytesRef.bytes, fieldAsBytesRef.offset, fieldAsBytesRef.length, 0, hash);

        switch (encodingType) {
            case 0 -> {
                assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 0, 8)).getLong());
                assertEquals(randomFrom, IntPoint.decodeDimension(subByteArray(result, 12, 4), 0));
                assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 16, 8)).getLong());
                assertEquals(randomTo, IntPoint.decodeDimension(subByteArray(result, 28, 4), 0));
            }
            case 1 -> {
                assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 0, 8)).getLong());
                assertEquals(randomFrom, LongPoint.decodeDimension(subByteArray(result, 8, 8), 0));
                assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 16, 8)).getLong());
                assertEquals(randomTo, LongPoint.decodeDimension(subByteArray(result, 24, 8), 0));
            }
            default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
        }
    }
}
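The two PercolatorFieldMapperTests hunks above use the statement form of the arrow syntax: a `case 0 -> { ... }` block runs several statements with no fall-through, so the old `break;` lines disappear while the `final byte[]` locals are still definitely assigned on every path. When such a block appears inside a switch expression instead, it hands its result back with `yield`. The sketch below shows both forms under assumed sizes and labels; it is not the Lucene `IntPoint`/`LongPoint` encoding logic.

    public class ArrowBlockExample {
        // Statement form: each arrow block runs to completion, no fall-through, no break.
        static byte[] encode(int encodingType, int value) {
            final byte[] encoded;
            switch (encodingType) {
                case 0 -> {
                    encoded = new byte[4];
                    encoded[0] = (byte) value; // placeholder for a real 4-byte encoding
                }
                case 1 -> {
                    encoded = new byte[8];
                    encoded[0] = (byte) value; // placeholder for a real 8-byte encoding
                }
                default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
            }
            return encoded;
        }

        // Expression form: a block-bodied case returns its value with 'yield'.
        static String describe(int encodingType) {
            return switch (encodingType) {
                case 0 -> "int";
                case 1 -> {
                    String label = "long";
                    yield label;
                }
                default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
            };
        }

        public static void main(String[] args) {
            System.out.println(encode(1, 42).length + " " + describe(1)); // prints "8 long"
        }
    }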
@ -319,23 +319,19 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
    }

    private static DiscountedCumulativeGain mutateTestItem(DiscountedCumulativeGain original) {
        return switch (randomIntBetween(0, 2)) {
            case 0 -> new DiscountedCumulativeGain(original.getNormalize() == false, original.getUnknownDocRating(), original.getK());
            case 1 -> new DiscountedCumulativeGain(
                original.getNormalize(),
                randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
                original.getK()
            );
            case 2 -> new DiscountedCumulativeGain(
                original.getNormalize(),
                original.getUnknownDocRating(),
                randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
            );
            default -> throw new IllegalArgumentException("mutation variant not allowed");
        };
    }
}
@ -52,23 +52,16 @@ public class EvalQueryQualityTests extends ESTestCase {
        if (randomBoolean()) {
            int metricDetail = randomIntBetween(0, 2);
            switch (metricDetail) {
                case 0 -> evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000)));
                case 1 -> evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000)));
                case 2 -> evalQueryQuality.setMetricDetails(
                    new DiscountedCumulativeGain.Detail(
                        randomDoubleBetween(0, 1, true),
                        randomBoolean() ? randomDoubleBetween(0, 1, true) : 0,
                        randomInt()
                    )
                );
                default -> throw new IllegalArgumentException("illegal randomized value in test");
            }
        }
        evalQueryQuality.addHitsAndRatings(ratedHits);
@ -196,23 +196,19 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
    }

    private static ExpectedReciprocalRank mutateTestItem(ExpectedReciprocalRank original) {
        return switch (randomIntBetween(0, 2)) {
            case 0 -> new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK());
            case 1 -> new ExpectedReciprocalRank(
                original.getMaxRelevance(),
                randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
                original.getK()
            );
            case 2 -> new ExpectedReciprocalRank(
                original.getMaxRelevance(),
                original.getUnknownDocRating(),
                randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
            );
            default -> throw new IllegalArgumentException("mutation variant not allowed");
        };
    }
}
@ -224,32 +224,24 @@ public class PrecisionAtKTests extends ESTestCase {
    }

    private static PrecisionAtK mutate(PrecisionAtK original) {
        PrecisionAtK pAtK = switch (randomIntBetween(0, 2)) {
            case 0 -> new PrecisionAtK(
                original.getRelevantRatingThreshold(),
                original.getIgnoreUnlabeled() == false,
                original.forcedSearchSize().getAsInt()
            );
            case 1 -> new PrecisionAtK(
                randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
                original.getIgnoreUnlabeled(),
                original.forcedSearchSize().getAsInt()
            );
            case 2 -> new PrecisionAtK(
                original.getRelevantRatingThreshold(),
                original.getIgnoreUnlabeled(),
                original.forcedSearchSize().getAsInt() + 1
            );
            default -> throw new IllegalStateException("The test should only allow three parameters mutated");
        };
        return pAtK;
    }
@ -80,17 +80,10 @@ public class RatedDocumentTests extends ESTestCase {
        String docId = original.getDocID();

        switch (randomIntBetween(0, 2)) {
            case 0 -> rating = randomValueOtherThan(rating, () -> randomInt());
            case 1 -> index = randomValueOtherThan(index, () -> randomAlphaOfLength(10));
            case 2 -> docId = randomValueOtherThan(docId, () -> randomAlphaOfLength(10));
            default -> throw new IllegalStateException("The test should only allow two parameters mutated");
        }
        return new RatedDocument(index, docId, rating);
    }
@ -40,14 +40,14 @@ public class RatedSearchHitTests extends ESTestCase {
        OptionalInt rating = original.getRating();
        SearchHit hit = original.getSearchHit();
        switch (randomIntBetween(0, 1)) {
            case 0 -> rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
            case 1 -> hit = new SearchHit(
                hit.docId(),
                hit.getId() + randomAlphaOfLength(10),
                Collections.emptyMap(),
                Collections.emptyMap()
            );
            default -> throw new IllegalStateException("The test should only allow two parameters mutated");
        }
        return new RatedSearchHit(hit, rating);
    }
@ -202,20 +202,14 @@ public class RecallAtKTests extends ESTestCase {
    }

    private static RecallAtK mutate(RecallAtK original) {
        RecallAtK recallAtK = switch (randomIntBetween(0, 1)) {
            case 0 -> new RecallAtK(
                randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
                original.forcedSearchSize().getAsInt()
            );
            case 1 -> new RecallAtK(original.getRelevantRatingThreshold(), original.forcedSearchSize().getAsInt() + 1);
            default -> throw new IllegalStateException("The test should only allow two parameters mutated");
        };
        return recallAtK;
    }
@ -908,17 +908,20 @@ public abstract class AbstractAsyncBulkByScrollAction<
    protected RequestWrapper<?> scriptChangedOpType(RequestWrapper<?> request, OpType oldOpType, OpType newOpType) {
        switch (newOpType) {
            case NOOP -> {
                taskWorker.countNoop();
                return null;
            }
            case DELETE -> {
                RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getId()));
                delete.setVersion(request.getVersion());
                delete.setVersionType(VersionType.INTERNAL);
                delete.setRouting(request.getRouting());
                return delete;
            }
            default -> throw new IllegalArgumentException(
                "Unsupported operation type change from [" + oldOpType + "] to [" + newOpType + "]"
            );
        }
    }
@ -946,18 +949,14 @@ public abstract class AbstractAsyncBulkByScrollAction<
        public static OpType fromString(String opType) {
            String lowerOpType = opType.toLowerCase(Locale.ROOT);
            return switch (lowerOpType) {
                case "noop" -> OpType.NOOP;
                case "index" -> OpType.INDEX;
                case "delete" -> OpType.DELETE;
                default -> throw new IllegalArgumentException(
                    "Operation type [" + lowerOpType + "] not allowed, only " + Arrays.toString(values()) + " are allowed"
                );
            };
        }

        @Override
@ -336,14 +336,9 @@ public class Reindexer {
            return;
        }
        switch (routingSpec) {
            case "keep" -> super.copyRouting(request, routing);
            case "discard" -> super.copyRouting(request, null);
            default -> throw new IllegalArgumentException("Unsupported routing command");
        }
    }
@ -308,23 +308,22 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
        boolean createdResponse;
        DocWriteRequest.OpType opType;
        switch (randomIntBetween(0, 2)) {
            case 0 -> {
                createdResponse = true;
                opType = DocWriteRequest.OpType.CREATE;
                created++;
            }
            case 1 -> {
                createdResponse = false;
                opType = randomFrom(DocWriteRequest.OpType.INDEX, DocWriteRequest.OpType.UPDATE);
                updated++;
            }
            case 2 -> {
                createdResponse = false;
                opType = DocWriteRequest.OpType.DELETE;
                deleted++;
            }
            default -> throw new RuntimeException("Bad scenario");
        }
        final int seqNo = randomInt(20);
        final int primaryTerm = randomIntBetween(1, 16);
@ -1146,17 +1145,20 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
        int wraps = randomIntBetween(0, 4);
        for (int i = 0; i < wraps; i++) {
            switch (randomIntBetween(0, 2)) {
                case 0 -> {
                    e = new SearchPhaseExecutionException("test", "test failure", e, new ShardSearchFailure[0]);
                    continue;
                }
                case 1 -> {
                    e = new ReduceSearchPhaseException("test", "test failure", e, new ShardSearchFailure[0]);
                    continue;
                }
                case 2 -> {
                    e = new ElasticsearchException(e);
                    continue;
                }
            }
        }
        return e;
    }
}
@ -107,14 +107,11 @@ public class AzureStorageService {
            return null;
        }

        return switch (proxy.type()) {
            case HTTP -> new ProxyOptions(ProxyOptions.Type.HTTP, (InetSocketAddress) proxy.address());
            case SOCKS -> new ProxyOptions(ProxyOptions.Type.SOCKS5, (InetSocketAddress) proxy.address());
            default -> null;
        };
    }

    // non-static, package private for testing
@ -138,22 +135,11 @@ public class AzureStorageService {
            throw new IllegalArgumentException("Unable to use " + locationMode + " location mode without a secondary location URI");
        }

        final String secondaryHost = switch (locationMode) {
            case PRIMARY_ONLY, SECONDARY_ONLY -> null;
            case PRIMARY_THEN_SECONDARY -> secondaryUri;
            case SECONDARY_THEN_PRIMARY -> primaryUri;
        };

        // The request retry policy uses seconds as the default time unit, since
        // it's possible to configure a timeout < 1s we should ceil that value
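In the second AzureStorageService hunk, the switch expression also removes the old definite-assignment boilerplate: `secondaryHost` is initialized directly from the expression, so the unreachable `default: assert false; throw ...` arm is gone once every `LocationMode` constant is covered. A compact sketch of that pattern with a stand-in enum rather than the Azure SDK types:

    public class DefiniteAssignmentExample {
        enum LocationMode { PRIMARY_ONLY, SECONDARY_ONLY, PRIMARY_THEN_SECONDARY, SECONDARY_THEN_PRIMARY }

        static String pickSecondaryHost(LocationMode mode, String primaryUri, String secondaryUri) {
            // The local is assigned exactly once, on every path, or the code does not compile.
            final String secondaryHost = switch (mode) {
                case PRIMARY_ONLY, SECONDARY_ONLY -> null;
                case PRIMARY_THEN_SECONDARY -> secondaryUri;
                case SECONDARY_THEN_PRIMARY -> primaryUri;
            };
            return secondaryHost;
        }

        public static void main(String[] args) {
            System.out.println(pickSecondaryHost(LocationMode.PRIMARY_THEN_SECONDARY, "https://p", "https://s"));
        }
    }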
Some files were not shown because too many files have changed in this diff.