mirror of
https://github.com/elastic/elasticsearch.git
synced 2025-04-24 23:27:25 -04:00
Use Java 14 switch expressions (#82178)
JEP 361[https://openjdk.java.net/jeps/361] added support for switch expressions which can be much more terse and less error-prone than switch statements. Another useful feature of switch expressions is exhaustiveness: we can make sure that an enum switch expression covers all the cases at compile time.
This commit is contained in:
parent
35a79bc7d4
commit
0699c9351f
720 changed files with 7813 additions and 13306 deletions
|
@ -89,16 +89,11 @@ public class RoundingBenchmark {
|
|||
roundingBuilder = Rounding.builder(TimeValue.parseTimeValue(interval, "interval"));
|
||||
}
|
||||
Rounding rounding = roundingBuilder.timeZone(ZoneId.of(zone)).build();
|
||||
switch (rounder) {
|
||||
case "java time":
|
||||
rounderBuilder = rounding::prepareJavaTime;
|
||||
break;
|
||||
case "es":
|
||||
rounderBuilder = () -> rounding.prepare(min, max);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Expectd rounder to be [java time] or [es]");
|
||||
}
|
||||
rounderBuilder = switch (rounder) {
|
||||
case "java time" -> rounding::prepareJavaTime;
|
||||
case "es" -> () -> rounding.prepare(min, max);
|
||||
default -> throw new IllegalArgumentException("Expected rounder to be [java time] or [es]");
|
||||
};
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
|
|
|
@ -101,28 +101,19 @@ public class ScriptScoreBenchmark {
|
|||
|
||||
@Setup
|
||||
public void setupScript() {
|
||||
switch (script) {
|
||||
case "expression":
|
||||
factory = scriptModule.engines.get("expression").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
|
||||
break;
|
||||
case "metal":
|
||||
factory = bareMetalScript();
|
||||
break;
|
||||
case "painless_cast":
|
||||
factory = scriptModule.engines.get("painless")
|
||||
.compile(
|
||||
"test",
|
||||
"((org.elasticsearch.index.fielddata.ScriptDocValues.Longs)doc['n']).value",
|
||||
ScoreScript.CONTEXT,
|
||||
Map.of()
|
||||
);
|
||||
break;
|
||||
case "painless_def":
|
||||
factory = scriptModule.engines.get("painless").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Don't know how to implement script [" + script + "]");
|
||||
}
|
||||
factory = switch (script) {
|
||||
case "expression" -> scriptModule.engines.get("expression").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
|
||||
case "metal" -> bareMetalScript();
|
||||
case "painless_cast" -> scriptModule.engines.get("painless")
|
||||
.compile(
|
||||
"test",
|
||||
"((org.elasticsearch.index.fielddata.ScriptDocValues.Longs)doc['n']).value",
|
||||
ScoreScript.CONTEXT,
|
||||
Map.of()
|
||||
);
|
||||
case "painless_def" -> scriptModule.engines.get("painless").compile("test", "doc['n'].value", ScoreScript.CONTEXT, Map.of());
|
||||
default -> throw new IllegalArgumentException("Don't know how to implement script [" + script + "]");
|
||||
};
|
||||
}
|
||||
|
||||
@Setup
|
||||
|
|
|
@ -107,20 +107,12 @@ public class AggConstructionContentionBenchmark {
|
|||
|
||||
@Setup
|
||||
public void setup() {
|
||||
switch (breaker) {
|
||||
case "real":
|
||||
breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
|
||||
break;
|
||||
case "preallocate":
|
||||
preallocateBreaker = true;
|
||||
breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
|
||||
break;
|
||||
case "noop":
|
||||
breakerService = new NoneCircuitBreakerService();
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
breakerService = switch (breaker) {
|
||||
case "real", "preallocate" -> new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
|
||||
case "noop" -> new NoneCircuitBreakerService();
|
||||
default -> throw new UnsupportedOperationException();
|
||||
};
|
||||
preallocateBreaker = breaker.equals("preallocate");
|
||||
bigArrays = new BigArrays(recycler, breakerService, "request");
|
||||
}
|
||||
|
||||
|
|
|
@ -52,22 +52,13 @@ public class FetchSourcePhaseBenchmark {
|
|||
|
||||
@Setup
|
||||
public void setup() throws IOException {
|
||||
switch (source) {
|
||||
case "tiny":
|
||||
sourceBytes = new BytesArray("{\"message\": \"short\"}");
|
||||
break;
|
||||
case "short":
|
||||
sourceBytes = read300BytesExample();
|
||||
break;
|
||||
case "one_4k_field":
|
||||
sourceBytes = buildBigExample("huge".repeat(1024));
|
||||
break;
|
||||
case "one_4m_field":
|
||||
sourceBytes = buildBigExample("huge".repeat(1024 * 1024));
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown source [" + source + "]");
|
||||
}
|
||||
sourceBytes = switch (source) {
|
||||
case "tiny" -> new BytesArray("{\"message\": \"short\"}");
|
||||
case "short" -> read300BytesExample();
|
||||
case "one_4k_field" -> buildBigExample("huge".repeat(1024));
|
||||
case "one_4m_field" -> buildBigExample("huge".repeat(1024 * 1024));
|
||||
default -> throw new IllegalArgumentException("Unknown source [" + source + "]");
|
||||
};
|
||||
fetchContext = new FetchSourceContext(
|
||||
true,
|
||||
Strings.splitStringByCommaToArray(includes),
|
||||
|
|
|
@ -64,20 +64,12 @@ public class FilterContentBenchmark {
|
|||
|
||||
@Setup
|
||||
public void setup() throws IOException {
|
||||
String sourceFile;
|
||||
switch (type) {
|
||||
case "cluster_stats":
|
||||
sourceFile = "monitor_cluster_stats.json";
|
||||
break;
|
||||
case "index_stats":
|
||||
sourceFile = "monitor_index_stats.json";
|
||||
break;
|
||||
case "node_stats":
|
||||
sourceFile = "monitor_node_stats.json";
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown type [" + type + "]");
|
||||
}
|
||||
String sourceFile = switch (type) {
|
||||
case "cluster_stats" -> "monitor_cluster_stats.json";
|
||||
case "index_stats" -> "monitor_index_stats.json";
|
||||
case "node_stats" -> "monitor_node_stats.json";
|
||||
default -> throw new IllegalArgumentException("Unknown type [" + type + "]");
|
||||
};
|
||||
source = readSource(sourceFile);
|
||||
filters = buildFilters();
|
||||
parserConfig = buildParseConfig();
|
||||
|
@ -87,31 +79,25 @@ public class FilterContentBenchmark {
|
|||
Map<String, Object> flattenMap = Maps.flatten(XContentHelper.convertToMap(source, true, XContentType.JSON).v2(), false, true);
|
||||
Set<String> keys = flattenMap.keySet();
|
||||
AtomicInteger count = new AtomicInteger();
|
||||
switch (fieldCount) {
|
||||
case "10_field":
|
||||
return keys.stream().filter(key -> count.getAndIncrement() % 5 == 0).limit(10).collect(Collectors.toSet());
|
||||
case "half_field":
|
||||
return keys.stream().filter(key -> count.getAndIncrement() % 2 == 0).collect(Collectors.toSet());
|
||||
case "all_field":
|
||||
return new HashSet<>(keys);
|
||||
case "wildcard_field":
|
||||
return new HashSet<>(Arrays.asList("*stats"));
|
||||
case "10_wildcard_field":
|
||||
return Set.of(
|
||||
"*stats.nodes*",
|
||||
"*stats.ind*",
|
||||
"*sta*.shards",
|
||||
"*stats*.xpack",
|
||||
"*stats.*.segments",
|
||||
"*stat*.*.data*",
|
||||
inclusive ? "*stats.**.request_cache" : "*stats.*.request_cache",
|
||||
inclusive ? "*stats.**.stat" : "*stats.*.stat",
|
||||
inclusive ? "*stats.**.threads" : "*stats.*.threads",
|
||||
"*source_node.t*"
|
||||
);
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown type [" + type + "]");
|
||||
}
|
||||
return switch (fieldCount) {
|
||||
case "10_field" -> keys.stream().filter(key -> count.getAndIncrement() % 5 == 0).limit(10).collect(Collectors.toSet());
|
||||
case "half_field" -> keys.stream().filter(key -> count.getAndIncrement() % 2 == 0).collect(Collectors.toSet());
|
||||
case "all_field" -> new HashSet<>(keys);
|
||||
case "wildcard_field" -> new HashSet<>(Arrays.asList("*stats"));
|
||||
case "10_wildcard_field" -> Set.of(
|
||||
"*stats.nodes*",
|
||||
"*stats.ind*",
|
||||
"*sta*.shards",
|
||||
"*stats*.xpack",
|
||||
"*stats.*.segments",
|
||||
"*stat*.*.data*",
|
||||
inclusive ? "*stats.**.request_cache" : "*stats.*.request_cache",
|
||||
inclusive ? "*stats.**.stat" : "*stats.*.stat",
|
||||
inclusive ? "*stats.**.threads" : "*stats.*.threads",
|
||||
"*source_node.t*"
|
||||
);
|
||||
default -> throw new IllegalArgumentException("Unknown type [" + type + "]");
|
||||
};
|
||||
}
|
||||
|
||||
@Benchmark
|
||||
|
|
|
@ -44,18 +44,11 @@ public class SymbolicLinkPreservingTar extends Tar {
|
|||
|
||||
@Override
|
||||
protected CopyAction createCopyAction() {
|
||||
final ArchiveOutputStreamFactory compressor;
|
||||
switch (getCompression()) {
|
||||
case BZIP2:
|
||||
compressor = Bzip2Archiver.getCompressor();
|
||||
break;
|
||||
case GZIP:
|
||||
compressor = GzipArchiver.getCompressor();
|
||||
break;
|
||||
default:
|
||||
compressor = new SimpleCompressor();
|
||||
break;
|
||||
}
|
||||
final ArchiveOutputStreamFactory compressor = switch (getCompression()) {
|
||||
case BZIP2 -> Bzip2Archiver.getCompressor();
|
||||
case GZIP -> GzipArchiver.getCompressor();
|
||||
default -> new SimpleCompressor();
|
||||
};
|
||||
return new SymbolicLinkPreservingTarCopyAction(getArchiveFile(), compressor, isPreserveFileTimestamps());
|
||||
}
|
||||
|
||||
|
|
|
@ -87,23 +87,17 @@ public class TransformLog4jConfigFilter extends FilterReader {
|
|||
}
|
||||
|
||||
switch (keyParts[2]) {
|
||||
case "type":
|
||||
case "type" -> {
|
||||
if (value.equals("RollingFile")) {
|
||||
value = "Console";
|
||||
}
|
||||
line = key + " = " + value;
|
||||
break;
|
||||
|
||||
case "fileName":
|
||||
case "filePattern":
|
||||
case "policies":
|
||||
case "strategy":
|
||||
}
|
||||
case "fileName", "filePattern", "policies", "strategy" -> {
|
||||
// No longer applicable. Omit it.
|
||||
skipNext = line.endsWith("\\");
|
||||
continue;
|
||||
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
} else if (line.startsWith("rootLogger.appenderRef")) {
|
||||
String[] parts = line.split("\\s*=\\s*");
|
||||
|
|
|
@ -21,15 +21,11 @@ public enum Architecture {
|
|||
|
||||
public static Architecture current() {
|
||||
final String architecture = System.getProperty("os.arch", "");
|
||||
switch (architecture) {
|
||||
case "amd64":
|
||||
case "x86_64":
|
||||
return X64;
|
||||
case "aarch64":
|
||||
return AARCH64;
|
||||
default:
|
||||
throw new IllegalArgumentException("can not determine architecture from [" + architecture + "]");
|
||||
}
|
||||
return switch (architecture) {
|
||||
case "amd64", "x86_64" -> X64;
|
||||
case "aarch64" -> AARCH64;
|
||||
default -> throw new IllegalArgumentException("can not determine architecture from [" + architecture + "]");
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -36,16 +36,12 @@ public abstract class AbstractBenchmark<T extends Closeable> {
|
|||
System.exit(1);
|
||||
}
|
||||
switch (args[0]) {
|
||||
case "search":
|
||||
runSearchBenchmark(args);
|
||||
break;
|
||||
case "bulk":
|
||||
runBulkIndexBenchmark(args);
|
||||
break;
|
||||
default:
|
||||
case "search" -> runSearchBenchmark(args);
|
||||
case "bulk" -> runBulkIndexBenchmark(args);
|
||||
default -> {
|
||||
System.err.println("Unknown benchmark type [" + args[0] + "]");
|
||||
System.exit(1);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -16,15 +16,14 @@ public class BenchmarkMain {
|
|||
@SuppressForbidden(reason = "system out is ok for a command line tool")
|
||||
public static void main(String[] args) throws Exception {
|
||||
String type = args[0];
|
||||
AbstractBenchmark<?> benchmark = null;
|
||||
switch (type) {
|
||||
case "rest":
|
||||
benchmark = new RestClientBenchmark();
|
||||
break;
|
||||
default:
|
||||
AbstractBenchmark<?> benchmark = switch (type) {
|
||||
case "rest" -> new RestClientBenchmark();
|
||||
default -> {
|
||||
System.err.println("Unknown client type [" + type + "]");
|
||||
System.exit(1);
|
||||
}
|
||||
yield null;
|
||||
}
|
||||
};
|
||||
benchmark.run(Arrays.copyOfRange(args, 1, args.length));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -163,14 +163,11 @@ public final class FollowInfoResponse {
|
|||
}
|
||||
|
||||
public static Status fromString(String value) {
|
||||
switch (value) {
|
||||
case "active":
|
||||
return Status.ACTIVE;
|
||||
case "paused":
|
||||
return Status.PAUSED;
|
||||
default:
|
||||
throw new IllegalArgumentException("unexpected status value [" + value + "]");
|
||||
}
|
||||
return switch (value) {
|
||||
case "active" -> Status.ACTIVE;
|
||||
case "paused" -> Status.PAUSED;
|
||||
default -> throw new IllegalArgumentException("unexpected status value [" + value + "]");
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -149,20 +149,11 @@ public class GetIndexResponse {
|
|||
parser.nextToken();
|
||||
if (parser.currentToken() == Token.START_OBJECT) {
|
||||
switch (parser.currentName()) {
|
||||
case "aliases":
|
||||
indexAliases = parseAliases(parser);
|
||||
break;
|
||||
case "mappings":
|
||||
indexMappings = parseMappings(parser);
|
||||
break;
|
||||
case "settings":
|
||||
indexSettings = Settings.fromXContent(parser);
|
||||
break;
|
||||
case "defaults":
|
||||
indexDefaultSettings = Settings.fromXContent(parser);
|
||||
break;
|
||||
default:
|
||||
parser.skipChildren();
|
||||
case "aliases" -> indexAliases = parseAliases(parser);
|
||||
case "mappings" -> indexMappings = parseMappings(parser);
|
||||
case "settings" -> indexSettings = Settings.fromXContent(parser);
|
||||
case "defaults" -> indexDefaultSettings = Settings.fromXContent(parser);
|
||||
default -> parser.skipChildren();
|
||||
}
|
||||
} else if (parser.currentToken() == Token.VALUE_STRING) {
|
||||
if (parser.currentName().equals("data_stream")) {
|
||||
|
|
|
@ -27,15 +27,11 @@ public enum LicenseStatus {
|
|||
}
|
||||
|
||||
public static LicenseStatus fromString(String value) {
|
||||
switch (value) {
|
||||
case "active":
|
||||
return ACTIVE;
|
||||
case "invalid":
|
||||
return INVALID;
|
||||
case "expired":
|
||||
return EXPIRED;
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown license status [" + value + "]");
|
||||
}
|
||||
return switch (value) {
|
||||
case "active" -> ACTIVE;
|
||||
case "invalid" -> INVALID;
|
||||
case "expired" -> EXPIRED;
|
||||
default -> throw new IllegalArgumentException("unknown license status [" + value + "]");
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -42,15 +42,11 @@ public enum LicensesStatus {
|
|||
}
|
||||
|
||||
public static LicensesStatus fromString(String value) {
|
||||
switch (value) {
|
||||
case "valid":
|
||||
return VALID;
|
||||
case "invalid":
|
||||
return INVALID;
|
||||
case "expired":
|
||||
return EXPIRED;
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown licenses status [" + value + "]");
|
||||
}
|
||||
return switch (value) {
|
||||
case "valid" -> VALID;
|
||||
case "invalid" -> INVALID;
|
||||
case "expired" -> EXPIRED;
|
||||
default -> throw new IllegalArgumentException("unknown licenses status [" + value + "]");
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -164,25 +164,16 @@ public final class RoleMapperExpressionParser {
|
|||
}
|
||||
|
||||
private Object parseFieldValue(XContentParser parser) throws IOException {
|
||||
switch (parser.currentToken()) {
|
||||
case VALUE_STRING:
|
||||
return parser.text();
|
||||
|
||||
case VALUE_BOOLEAN:
|
||||
return parser.booleanValue();
|
||||
|
||||
case VALUE_NUMBER:
|
||||
return parser.longValue();
|
||||
|
||||
case VALUE_NULL:
|
||||
return null;
|
||||
|
||||
default:
|
||||
throw new ElasticsearchParseException(
|
||||
"failed to parse rules expression. expected a field value but found [{}] instead",
|
||||
parser.currentToken()
|
||||
);
|
||||
}
|
||||
return switch (parser.currentToken()) {
|
||||
case VALUE_STRING -> parser.text();
|
||||
case VALUE_BOOLEAN -> parser.booleanValue();
|
||||
case VALUE_NUMBER -> parser.longValue();
|
||||
case VALUE_NULL -> null;
|
||||
default -> throw new ElasticsearchParseException(
|
||||
"failed to parse rules expression. expected a field value but found [{}] instead",
|
||||
parser.currentToken()
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -75,24 +75,17 @@ public class GroupConfig implements ToXContentObject {
|
|||
continue;
|
||||
}
|
||||
|
||||
SingleGroupSource groupSource = null;
|
||||
switch (groupType) {
|
||||
case "terms":
|
||||
groupSource = TermsGroupSource.fromXContent(parser);
|
||||
break;
|
||||
case "histogram":
|
||||
groupSource = HistogramGroupSource.fromXContent(parser);
|
||||
break;
|
||||
case "date_histogram":
|
||||
groupSource = DateHistogramGroupSource.fromXContent(parser);
|
||||
break;
|
||||
case "geotile_grid":
|
||||
groupSource = GeoTileGroupSource.fromXContent(parser);
|
||||
break;
|
||||
default:
|
||||
SingleGroupSource groupSource = switch (groupType) {
|
||||
case "terms" -> TermsGroupSource.fromXContent(parser);
|
||||
case "histogram" -> HistogramGroupSource.fromXContent(parser);
|
||||
case "date_histogram" -> DateHistogramGroupSource.fromXContent(parser);
|
||||
case "geotile_grid" -> GeoTileGroupSource.fromXContent(parser);
|
||||
default -> {
|
||||
// not a valid group source. Consume up to the dest field end object
|
||||
consumeUntilEndObject(parser, 2);
|
||||
}
|
||||
yield null;
|
||||
}
|
||||
};
|
||||
|
||||
if (groupSource != null) {
|
||||
groups.put(destinationFieldName, groupSource);
|
||||
|
|
|
@ -69,29 +69,28 @@ public class ClusterRequestConvertersTests extends ESTestCase {
|
|||
String timeout = ESTestCase.randomTimeValue();
|
||||
String masterTimeout = ESTestCase.randomTimeValue();
|
||||
switch (timeoutType) {
|
||||
case "timeout":
|
||||
case "timeout" -> {
|
||||
healthRequest.timeout(timeout);
|
||||
expectedParams.put("timeout", timeout);
|
||||
// If Master Timeout wasn't set it uses the same value as Timeout
|
||||
expectedParams.put("master_timeout", timeout);
|
||||
break;
|
||||
case "masterTimeout":
|
||||
}
|
||||
case "masterTimeout" -> {
|
||||
expectedParams.put("timeout", "30s");
|
||||
healthRequest.masterNodeTimeout(masterTimeout);
|
||||
expectedParams.put("master_timeout", masterTimeout);
|
||||
break;
|
||||
case "both":
|
||||
}
|
||||
case "both" -> {
|
||||
healthRequest.timeout(timeout);
|
||||
expectedParams.put("timeout", timeout);
|
||||
healthRequest.masterNodeTimeout(timeout);
|
||||
expectedParams.put("master_timeout", timeout);
|
||||
break;
|
||||
case "none":
|
||||
}
|
||||
case "none" -> {
|
||||
expectedParams.put("timeout", "30s");
|
||||
expectedParams.put("master_timeout", "30s");
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
default -> throw new UnsupportedOperationException();
|
||||
}
|
||||
RequestConvertersTests.setRandomWaitForActiveShards(healthRequest::waitForActiveShards, ActiveShardCount.NONE, expectedParams);
|
||||
if (ESTestCase.randomBoolean()) {
|
||||
|
|
|
@ -356,20 +356,11 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||
List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
|
||||
for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
|
||||
switch (entry.getKey()) {
|
||||
case "agg":
|
||||
assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
break;
|
||||
case "delay":
|
||||
assertThat(entry.getValue(), equalTo("foo"));
|
||||
break;
|
||||
case "calendar_interval":
|
||||
assertThat(entry.getValue(), equalTo("1d"));
|
||||
break;
|
||||
case "time_zone":
|
||||
assertThat(entry.getValue(), equalTo("UTC"));
|
||||
break;
|
||||
default:
|
||||
fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
case "agg" -> assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
case "delay" -> assertThat(entry.getValue(), equalTo("foo"));
|
||||
case "calendar_interval" -> assertThat(entry.getValue(), equalTo("1d"));
|
||||
case "time_zone" -> assertThat(entry.getValue(), equalTo("UTC"));
|
||||
default -> fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -471,20 +462,11 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||
List<Map<String, Object>> timestampCaps = fieldCaps.get("date").getAggs();
|
||||
for (Map.Entry<String, Object> entry : timestampCaps.get(0).entrySet()) {
|
||||
switch (entry.getKey()) {
|
||||
case "agg":
|
||||
assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
break;
|
||||
case "delay":
|
||||
assertThat(entry.getValue(), equalTo("foo"));
|
||||
break;
|
||||
case "calendar_interval":
|
||||
assertThat(entry.getValue(), equalTo("1d"));
|
||||
break;
|
||||
case "time_zone":
|
||||
assertThat(entry.getValue(), equalTo("UTC"));
|
||||
break;
|
||||
default:
|
||||
fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
case "agg" -> assertThat(entry.getValue(), equalTo("date_histogram"));
|
||||
case "delay" -> assertThat(entry.getValue(), equalTo("foo"));
|
||||
case "calendar_interval" -> assertThat(entry.getValue(), equalTo("1d"));
|
||||
case "time_zone" -> assertThat(entry.getValue(), equalTo("UTC"));
|
||||
default -> fail("Unknown field cap: [" + entry.getKey() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -27,19 +27,16 @@ public class XPackRequestConvertersTests extends ESTestCase {
|
|||
}
|
||||
int option = ESTestCase.between(0, 2);
|
||||
switch (option) {
|
||||
case 0:
|
||||
infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
|
||||
break;
|
||||
case 1:
|
||||
case 0 -> infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class));
|
||||
case 1 -> {
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES));
|
||||
expectedParams.put("categories", "features");
|
||||
break;
|
||||
case 2:
|
||||
}
|
||||
case 2 -> {
|
||||
infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD));
|
||||
expectedParams.put("categories", "build,features");
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid option [" + option + "]");
|
||||
}
|
||||
default -> throw new IllegalArgumentException("invalid option [" + option + "]");
|
||||
}
|
||||
|
||||
Request request = XPackRequestConverters.info(infoRequest);
|
||||
|
|
|
@ -116,17 +116,19 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<
|
|||
|
||||
private static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xType) {
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
case 0 -> {
|
||||
String fieldName = randomAlphaOfLengthBetween(3, 10);
|
||||
Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xType);
|
||||
DocumentField input = new DocumentField(fieldName, tuple.v1());
|
||||
DocumentField expected = new DocumentField(fieldName, tuple.v2());
|
||||
return Tuple.tuple(input, expected);
|
||||
case 1:
|
||||
}
|
||||
case 1 -> {
|
||||
List<Object> listValues = randomList(1, 5, () -> randomList(1, 5, ESTestCase::randomInt));
|
||||
DocumentField listField = new DocumentField(randomAlphaOfLength(5), listValues);
|
||||
return Tuple.tuple(listField, listField);
|
||||
case 2:
|
||||
}
|
||||
case 2 -> {
|
||||
List<Object> objectValues = randomList(
|
||||
1,
|
||||
5,
|
||||
|
@ -141,8 +143,8 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<
|
|||
);
|
||||
DocumentField objectField = new DocumentField(randomAlphaOfLength(5), objectValues);
|
||||
return Tuple.tuple(objectField, objectField);
|
||||
default:
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
default -> throw new IllegalStateException();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -212,14 +214,11 @@ public class EqlSearchResponseTests extends AbstractResponseTestCase<
|
|||
|
||||
public static org.elasticsearch.xpack.eql.action.EqlSearchResponse createRandomInstance(TotalHits totalHits, XContentType xType) {
|
||||
int type = between(0, 1);
|
||||
switch (type) {
|
||||
case 0:
|
||||
return createRandomEventsResponse(totalHits, xType);
|
||||
case 1:
|
||||
return createRandomSequencesResponse(totalHits, xType);
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
return switch (type) {
|
||||
case 0 -> createRandomEventsResponse(totalHits, xType);
|
||||
case 1 -> createRandomSequencesResponse(totalHits, xType);
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -49,29 +49,24 @@ public class ExplainLifecycleRequestTests extends ESTestCase {
|
|||
String[] indices = instance.getIndices();
|
||||
IndicesOptions indicesOptions = instance.indicesOptions();
|
||||
switch (between(0, 1)) {
|
||||
case 0:
|
||||
indices = randomValueOtherThanMany(
|
||||
i -> Arrays.equals(i, instance.getIndices()),
|
||||
() -> generateRandomStringArray(20, 10, false, false)
|
||||
);
|
||||
break;
|
||||
case 1:
|
||||
indicesOptions = randomValueOtherThan(
|
||||
indicesOptions,
|
||||
() -> IndicesOptions.fromOptions(
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean()
|
||||
)
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError("Illegal randomisation branch");
|
||||
case 0 -> indices = randomValueOtherThanMany(
|
||||
i -> Arrays.equals(i, instance.getIndices()),
|
||||
() -> generateRandomStringArray(20, 10, false, false)
|
||||
);
|
||||
case 1 -> indicesOptions = randomValueOtherThan(
|
||||
instance.indicesOptions(),
|
||||
() -> IndicesOptions.fromOptions(
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean(),
|
||||
randomBoolean()
|
||||
)
|
||||
);
|
||||
default -> throw new AssertionError("Illegal randomisation branch");
|
||||
}
|
||||
ExplainLifecycleRequest newRequest = new ExplainLifecycleRequest(indices);
|
||||
newRequest.indicesOptions(indicesOptions);
|
||||
|
|
|
@ -226,60 +226,37 @@ public class LifecyclePolicyTests extends AbstractXContentTestCase<LifecyclePoli
|
|||
List<String> phaseNames = Arrays.asList("hot", "warm", "cold", "delete");
|
||||
Map<String, Phase> phases = new HashMap<>(phaseNames.size());
|
||||
Function<String, Set<String>> validActions = (phase) -> {
|
||||
switch (phase) {
|
||||
case "hot":
|
||||
return VALID_HOT_ACTIONS;
|
||||
case "warm":
|
||||
return VALID_WARM_ACTIONS;
|
||||
case "cold":
|
||||
return VALID_COLD_ACTIONS;
|
||||
case "delete":
|
||||
return VALID_DELETE_ACTIONS;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid phase [" + phase + "]");
|
||||
}
|
||||
return switch (phase) {
|
||||
case "hot" -> VALID_HOT_ACTIONS;
|
||||
case "warm" -> VALID_WARM_ACTIONS;
|
||||
case "cold" -> VALID_COLD_ACTIONS;
|
||||
case "delete" -> VALID_DELETE_ACTIONS;
|
||||
default -> throw new IllegalArgumentException("invalid phase [" + phase + "]");
|
||||
};
|
||||
};
|
||||
Function<String, Boolean> allowEmptyActions = (phase) -> {
|
||||
switch (phase) {
|
||||
case "hot":
|
||||
case "warm":
|
||||
case "cold":
|
||||
return true;
|
||||
case "delete":
|
||||
return false;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid phase [" + phase + "]");
|
||||
}
|
||||
return switch (phase) {
|
||||
case "hot", "warm", "cold" -> true;
|
||||
case "delete" -> false;
|
||||
default -> throw new IllegalArgumentException("invalid phase [" + phase + "]");
|
||||
};
|
||||
};
|
||||
Function<String, LifecycleAction> randomAction = (action) -> {
|
||||
switch (action) {
|
||||
case AllocateAction.NAME:
|
||||
return AllocateActionTests.randomInstance();
|
||||
case DeleteAction.NAME:
|
||||
return new DeleteAction();
|
||||
case ForceMergeAction.NAME:
|
||||
return ForceMergeActionTests.randomInstance();
|
||||
case ReadOnlyAction.NAME:
|
||||
return new ReadOnlyAction();
|
||||
case RolloverAction.NAME:
|
||||
return RolloverActionTests.randomInstance();
|
||||
case ShrinkAction.NAME:
|
||||
return ShrinkActionTests.randomInstance();
|
||||
case FreezeAction.NAME:
|
||||
return new FreezeAction();
|
||||
case WaitForSnapshotAction.NAME:
|
||||
return WaitForSnapshotActionTests.randomInstance();
|
||||
case SetPriorityAction.NAME:
|
||||
return SetPriorityActionTests.randomInstance();
|
||||
case UnfollowAction.NAME:
|
||||
return new UnfollowAction();
|
||||
case SearchableSnapshotAction.NAME:
|
||||
return new SearchableSnapshotAction("repo", randomBoolean());
|
||||
case MigrateAction.NAME:
|
||||
return new MigrateAction(randomBoolean());
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid action [" + action + "]");
|
||||
}
|
||||
return switch (action) {
|
||||
case AllocateAction.NAME -> AllocateActionTests.randomInstance();
|
||||
case DeleteAction.NAME -> new DeleteAction();
|
||||
case ForceMergeAction.NAME -> ForceMergeActionTests.randomInstance();
|
||||
case ReadOnlyAction.NAME -> new ReadOnlyAction();
|
||||
case RolloverAction.NAME -> RolloverActionTests.randomInstance();
|
||||
case ShrinkAction.NAME -> ShrinkActionTests.randomInstance();
|
||||
case FreezeAction.NAME -> new FreezeAction();
|
||||
case WaitForSnapshotAction.NAME -> WaitForSnapshotActionTests.randomInstance();
|
||||
case SetPriorityAction.NAME -> SetPriorityActionTests.randomInstance();
|
||||
case UnfollowAction.NAME -> new UnfollowAction();
|
||||
case SearchableSnapshotAction.NAME -> new SearchableSnapshotAction("repo", randomBoolean());
|
||||
case MigrateAction.NAME -> new MigrateAction(randomBoolean());
|
||||
default -> throw new IllegalArgumentException("invalid action [" + action + "]");
|
||||
};
|
||||
};
|
||||
TimeValue prev = null;
|
||||
boolean searchableSnapshotSeen = false;
|
||||
|
@ -318,33 +295,20 @@ public class LifecyclePolicyTests extends AbstractXContentTestCase<LifecyclePoli
|
|||
}
|
||||
|
||||
private LifecycleAction getTestAction(String actionName) {
|
||||
switch (actionName) {
|
||||
case AllocateAction.NAME:
|
||||
return AllocateActionTests.randomInstance();
|
||||
case DeleteAction.NAME:
|
||||
return new DeleteAction();
|
||||
case ForceMergeAction.NAME:
|
||||
return ForceMergeActionTests.randomInstance();
|
||||
case ReadOnlyAction.NAME:
|
||||
return new ReadOnlyAction();
|
||||
case RolloverAction.NAME:
|
||||
return RolloverActionTests.randomInstance();
|
||||
case ShrinkAction.NAME:
|
||||
return ShrinkActionTests.randomInstance();
|
||||
case FreezeAction.NAME:
|
||||
return new FreezeAction();
|
||||
case WaitForSnapshotAction.NAME:
|
||||
return WaitForSnapshotActionTests.randomInstance();
|
||||
case SetPriorityAction.NAME:
|
||||
return SetPriorityActionTests.randomInstance();
|
||||
case SearchableSnapshotAction.NAME:
|
||||
return SearchableSnapshotActionTests.randomInstance();
|
||||
case UnfollowAction.NAME:
|
||||
return new UnfollowAction();
|
||||
case MigrateAction.NAME:
|
||||
return new MigrateAction(randomBoolean());
|
||||
default:
|
||||
throw new IllegalArgumentException("unsupported phase action [" + actionName + "]");
|
||||
}
|
||||
return switch (actionName) {
|
||||
case AllocateAction.NAME -> AllocateActionTests.randomInstance();
|
||||
case DeleteAction.NAME -> new DeleteAction();
|
||||
case ForceMergeAction.NAME -> ForceMergeActionTests.randomInstance();
|
||||
case ReadOnlyAction.NAME -> new ReadOnlyAction();
|
||||
case RolloverAction.NAME -> RolloverActionTests.randomInstance();
|
||||
case ShrinkAction.NAME -> ShrinkActionTests.randomInstance();
|
||||
case FreezeAction.NAME -> new FreezeAction();
|
||||
case WaitForSnapshotAction.NAME -> WaitForSnapshotActionTests.randomInstance();
|
||||
case SetPriorityAction.NAME -> SetPriorityActionTests.randomInstance();
|
||||
case SearchableSnapshotAction.NAME -> SearchableSnapshotActionTests.randomInstance();
|
||||
case UnfollowAction.NAME -> new UnfollowAction();
|
||||
case MigrateAction.NAME -> new MigrateAction(randomBoolean());
|
||||
default -> throw new IllegalArgumentException("unsupported phase action [" + actionName + "]");
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,43 +27,40 @@ import org.elasticsearch.xcontent.XContentParser;
|
|||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class EvaluateDataFrameResponseTests extends AbstractXContentTestCase<EvaluateDataFrameResponse> {
|
||||
private enum Evaluation {
|
||||
OUTLIER_DETECTION,
|
||||
CLASSIFICATION,
|
||||
REGRESSION
|
||||
}
|
||||
|
||||
public static EvaluateDataFrameResponse randomResponse() {
|
||||
String evaluationName = randomFrom(OutlierDetection.NAME, Classification.NAME, Regression.NAME);
|
||||
List<EvaluationMetric.Result> metrics;
|
||||
switch (evaluationName) {
|
||||
case OutlierDetection.NAME:
|
||||
metrics = randomSubsetOf(
|
||||
Arrays.asList(
|
||||
AucRocResultTests.randomResult(),
|
||||
PrecisionMetricResultTests.randomResult(),
|
||||
RecallMetricResultTests.randomResult(),
|
||||
ConfusionMatrixMetricResultTests.randomResult()
|
||||
)
|
||||
);
|
||||
break;
|
||||
case Regression.NAME:
|
||||
metrics = randomSubsetOf(
|
||||
Arrays.asList(MeanSquaredErrorMetricResultTests.randomResult(), RSquaredMetricResultTests.randomResult())
|
||||
);
|
||||
break;
|
||||
case Classification.NAME:
|
||||
metrics = randomSubsetOf(
|
||||
Arrays.asList(
|
||||
AucRocResultTests.randomResult(),
|
||||
AccuracyMetricResultTests.randomResult(),
|
||||
org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetricResultTests.randomResult(),
|
||||
org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetricResultTests.randomResult(),
|
||||
MulticlassConfusionMatrixMetricResultTests.randomResult()
|
||||
)
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError("Please add missing \"case\" variant to the \"switch\" statement");
|
||||
}
|
||||
List<EvaluationMetric.Result> metrics = switch (Evaluation.valueOf(evaluationName.toUpperCase(Locale.ROOT))) {
|
||||
case OUTLIER_DETECTION -> randomSubsetOf(
|
||||
Arrays.asList(
|
||||
AucRocResultTests.randomResult(),
|
||||
PrecisionMetricResultTests.randomResult(),
|
||||
RecallMetricResultTests.randomResult(),
|
||||
ConfusionMatrixMetricResultTests.randomResult()
|
||||
)
|
||||
);
|
||||
case REGRESSION -> randomSubsetOf(
|
||||
Arrays.asList(MeanSquaredErrorMetricResultTests.randomResult(), RSquaredMetricResultTests.randomResult())
|
||||
);
|
||||
case CLASSIFICATION -> randomSubsetOf(
|
||||
Arrays.asList(
|
||||
AucRocResultTests.randomResult(),
|
||||
AccuracyMetricResultTests.randomResult(),
|
||||
org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetricResultTests.randomResult(),
|
||||
org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetricResultTests.randomResult(),
|
||||
MulticlassConfusionMatrixMetricResultTests.randomResult()
|
||||
)
|
||||
);
|
||||
};
|
||||
return new EvaluateDataFrameResponse(evaluationName, metrics);
|
||||
}
|
||||
|
||||
|
|
|
@ -82,18 +82,14 @@ public class DateHistogramGroupConfigTests extends AbstractXContentTestCase<Date
|
|||
final DateHistogramInterval delay = randomBoolean() ? new DateHistogramInterval(randomPositiveTimeValue()) : null;
|
||||
final String timezone = randomBoolean() ? randomZone().toString() : null;
|
||||
int i = randomIntBetween(0, 2);
|
||||
final DateHistogramInterval interval;
|
||||
switch (i) {
|
||||
case 0:
|
||||
interval = new DateHistogramInterval(randomPositiveTimeValue());
|
||||
return new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone);
|
||||
case 1:
|
||||
interval = new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"));
|
||||
return new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone);
|
||||
default:
|
||||
interval = new DateHistogramInterval(randomPositiveTimeValue());
|
||||
return new DateHistogramGroupConfig(field, interval, delay, timezone);
|
||||
}
|
||||
|
||||
final DateHistogramInterval interval = switch (i) {
|
||||
case 1 -> new DateHistogramInterval(randomTimeValue(1, 1, "m", "h", "d", "w"));
|
||||
default -> new DateHistogramInterval(randomPositiveTimeValue());
|
||||
};
|
||||
return switch (i) {
|
||||
case 0 -> new DateHistogramGroupConfig.FixedInterval(field, interval, delay, timezone);
|
||||
case 1 -> new DateHistogramGroupConfig.CalendarInterval(field, interval, delay, timezone);
|
||||
default -> new DateHistogramGroupConfig(field, interval, delay, timezone);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -48,28 +48,25 @@ public class ClearServiceAccountTokenCacheRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private ClearServiceAccountTokenCacheRequest mutateInstance(ClearServiceAccountTokenCacheRequest request) {
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
return new ClearServiceAccountTokenCacheRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenNames()
|
||||
);
|
||||
case 1:
|
||||
return new ClearServiceAccountTokenCacheRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenNames()
|
||||
);
|
||||
default:
|
||||
return new ClearServiceAccountTokenCacheRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThanMany(
|
||||
a -> Arrays.equals(a, request.getTokenNames()),
|
||||
() -> randomArray(0, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))
|
||||
)
|
||||
);
|
||||
}
|
||||
return switch (randomIntBetween(0, 2)) {
|
||||
case 0 -> new ClearServiceAccountTokenCacheRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenNames()
|
||||
);
|
||||
case 1 -> new ClearServiceAccountTokenCacheRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenNames()
|
||||
);
|
||||
default -> new ClearServiceAccountTokenCacheRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThanMany(
|
||||
a -> Arrays.equals(a, request.getTokenNames()),
|
||||
() -> randomArray(0, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -86,27 +86,22 @@ public class CreateApiKeyResponseTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private static CreateApiKeyResponse mutateTestItem(CreateApiKeyResponse original) {
|
||||
switch (randomIntBetween(0, 3)) {
|
||||
case 0:
|
||||
return new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
|
||||
case 1:
|
||||
return new CreateApiKeyResponse(
|
||||
original.getName(),
|
||||
randomAlphaOfLengthBetween(4, 8),
|
||||
original.getKey(),
|
||||
original.getExpiration()
|
||||
);
|
||||
case 2:
|
||||
return new CreateApiKeyResponse(
|
||||
original.getName(),
|
||||
original.getId(),
|
||||
UUIDs.randomBase64UUIDSecureString(),
|
||||
original.getExpiration()
|
||||
);
|
||||
case 3:
|
||||
return new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), Instant.ofEpochMilli(150000));
|
||||
default:
|
||||
return new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
|
||||
}
|
||||
return switch (randomIntBetween(0, 3)) {
|
||||
case 0 -> new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
|
||||
case 1 -> new CreateApiKeyResponse(
|
||||
original.getName(),
|
||||
randomAlphaOfLengthBetween(4, 8),
|
||||
original.getKey(),
|
||||
original.getExpiration()
|
||||
);
|
||||
case 2 -> new CreateApiKeyResponse(
|
||||
original.getName(),
|
||||
original.getId(),
|
||||
UUIDs.randomBase64UUIDSecureString(),
|
||||
original.getExpiration()
|
||||
);
|
||||
case 3 -> new CreateApiKeyResponse(original.getName(), original.getId(), original.getKey(), Instant.ofEpochMilli(150000));
|
||||
default -> new CreateApiKeyResponse(randomAlphaOfLength(7), original.getId(), original.getKey(), original.getExpiration());
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -66,35 +66,31 @@ public class CreateServiceAccountTokenRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private CreateServiceAccountTokenRequest mutateInstance(CreateServiceAccountTokenRequest request) {
|
||||
switch (randomIntBetween(0, 3)) {
|
||||
case 0:
|
||||
return new CreateServiceAccountTokenRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 1:
|
||||
return new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 2:
|
||||
return new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
default:
|
||||
return new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
|
||||
);
|
||||
}
|
||||
return switch (randomIntBetween(0, 3)) {
|
||||
case 0 -> new CreateServiceAccountTokenRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 1 -> new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 2 -> new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
default -> new CreateServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,36 +65,32 @@ public class DeleteServiceAccountTokenRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private DeleteServiceAccountTokenRequest mutateInstance(DeleteServiceAccountTokenRequest request) {
|
||||
switch (randomIntBetween(0, 3)) {
|
||||
case 0:
|
||||
return new DeleteServiceAccountTokenRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 1:
|
||||
return new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 2:
|
||||
return new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
default:
|
||||
return new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
|
||||
);
|
||||
}
|
||||
return switch (randomIntBetween(0, 3)) {
|
||||
case 0 -> new DeleteServiceAccountTokenRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 1 -> new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getTokenName(),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
case 2 -> new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
randomValueOtherThan(request.getTokenName(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getRefreshPolicy()
|
||||
);
|
||||
default -> new DeleteServiceAccountTokenRequest(
|
||||
request.getNamespace(),
|
||||
request.getServiceName(),
|
||||
request.getTokenName(),
|
||||
randomValueOtherThan(request.getRefreshPolicy(), () -> randomFrom(RefreshPolicy.values()))
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -17,7 +17,6 @@ import org.elasticsearch.xcontent.XContentType;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
|
@ -79,71 +78,56 @@ public class ExpressionRoleMappingTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private static ExpressionRoleMapping mutateTestItem(ExpressionRoleMapping original) throws IOException {
|
||||
ExpressionRoleMapping mutated = null;
|
||||
switch (randomIntBetween(0, 5)) {
|
||||
case 0:
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"namechanged",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
break;
|
||||
case 1:
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("changed", "changed"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
break;
|
||||
case 2:
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("changed"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
break;
|
||||
case 3:
|
||||
Map<String, Object> metadata = new HashMap<>();
|
||||
metadata.put("a", "b");
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
metadata,
|
||||
true
|
||||
);
|
||||
break;
|
||||
case 4:
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
Collections.emptyList(),
|
||||
singletonList(new TemplateRoleName(Collections.singletonMap("source", "superuser"), TemplateRoleName.Format.STRING)),
|
||||
null,
|
||||
true
|
||||
);
|
||||
break;
|
||||
case 5:
|
||||
mutated = new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
false
|
||||
);
|
||||
break;
|
||||
}
|
||||
return mutated;
|
||||
return switch (randomIntBetween(0, 5)) {
|
||||
case 0 -> new ExpressionRoleMapping(
|
||||
"namechanged",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
case 1 -> new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("changed", "changed"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
case 2 -> new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("changed"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
true
|
||||
);
|
||||
case 3 -> new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
Map.of("a", "b"),
|
||||
true
|
||||
);
|
||||
case 4 -> new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
Collections.emptyList(),
|
||||
singletonList(new TemplateRoleName(Collections.singletonMap("source", "superuser"), TemplateRoleName.Format.STRING)),
|
||||
null,
|
||||
true
|
||||
);
|
||||
case 5 -> new ExpressionRoleMapping(
|
||||
"kerberosmapping",
|
||||
FieldRoleMapperExpression.ofKeyValues("realm.name", "kerb1"),
|
||||
singletonList("superuser"),
|
||||
Collections.emptyList(),
|
||||
null,
|
||||
false
|
||||
);
|
||||
default -> throw new UnsupportedOperationException();
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -104,12 +104,10 @@ public class GetApiKeyResponseTests extends ESTestCase {
|
|||
"user-b",
|
||||
"realm-y"
|
||||
);
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
return new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
|
||||
default:
|
||||
return new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
|
||||
}
|
||||
return switch (randomIntBetween(0, 2)) {
|
||||
case 0 -> new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
|
||||
default -> new GetApiKeyResponse(Arrays.asList(apiKeyInfo));
|
||||
};
|
||||
}
|
||||
|
||||
private static ApiKey createApiKeyInfo(
|
||||
|
|
|
@ -107,11 +107,10 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private static GetRoleMappingsResponse mutateTestItem(GetRoleMappingsResponse original) {
|
||||
GetRoleMappingsResponse mutated = null;
|
||||
switch (randomIntBetween(0, 1)) {
|
||||
case 0:
|
||||
final List<ExpressionRoleMapping> roleMappingsList1 = new ArrayList<>();
|
||||
roleMappingsList1.add(
|
||||
ExpressionRoleMapping originalRoleMapping = original.getMappings().get(0);
|
||||
return switch (randomIntBetween(0, 1)) {
|
||||
case 0 -> new GetRoleMappingsResponse(
|
||||
List.of(
|
||||
new ExpressionRoleMapping(
|
||||
"ldapmapping",
|
||||
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
||||
|
@ -120,13 +119,10 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
|||
null,
|
||||
false
|
||||
)
|
||||
);
|
||||
mutated = new GetRoleMappingsResponse(roleMappingsList1);
|
||||
break;
|
||||
case 1:
|
||||
final List<ExpressionRoleMapping> roleMappingsList2 = new ArrayList<>();
|
||||
ExpressionRoleMapping originalRoleMapping = original.getMappings().get(0);
|
||||
roleMappingsList2.add(
|
||||
)
|
||||
);
|
||||
default -> new GetRoleMappingsResponse(
|
||||
List.of(
|
||||
new ExpressionRoleMapping(
|
||||
originalRoleMapping.getName(),
|
||||
FieldRoleMapperExpression.ofGroups("cn=ipausers,cn=groups,cn=accounts,dc=ipademo,dc=local"),
|
||||
|
@ -135,10 +131,8 @@ public class GetRoleMappingsResponseTests extends ESTestCase {
|
|||
originalRoleMapping.getMetadata(),
|
||||
originalRoleMapping.isEnabled() == false
|
||||
)
|
||||
);
|
||||
mutated = new GetRoleMappingsResponse(roleMappingsList2);
|
||||
break;
|
||||
}
|
||||
return mutated;
|
||||
)
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,17 +37,15 @@ public class GetServiceAccountCredentialsRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private GetServiceAccountCredentialsRequest mutateInstance(GetServiceAccountCredentialsRequest request) {
|
||||
switch (randomIntBetween(0, 1)) {
|
||||
case 0:
|
||||
return new GetServiceAccountCredentialsRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName()
|
||||
);
|
||||
default:
|
||||
return new GetServiceAccountCredentialsRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||
);
|
||||
}
|
||||
return switch (randomIntBetween(0, 1)) {
|
||||
case 0 -> new GetServiceAccountCredentialsRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName()
|
||||
);
|
||||
default -> new GetServiceAccountCredentialsRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -54,17 +54,15 @@ public class GetServiceAccountsRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private GetServiceAccountsRequest mutateInstance(GetServiceAccountsRequest request) {
|
||||
switch (randomIntBetween(0, 1)) {
|
||||
case 0:
|
||||
return new GetServiceAccountsRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName()
|
||||
);
|
||||
default:
|
||||
return new GetServiceAccountsRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||
);
|
||||
}
|
||||
return switch (randomIntBetween(0, 1)) {
|
||||
case 0 -> new GetServiceAccountsRequest(
|
||||
randomValueOtherThan(request.getNamespace(), () -> randomAlphaOfLengthBetween(3, 8)),
|
||||
request.getServiceName()
|
||||
);
|
||||
default -> new GetServiceAccountsRequest(
|
||||
request.getNamespace(),
|
||||
randomValueOtherThan(request.getServiceName(), () -> randomAlphaOfLengthBetween(3, 8))
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -96,14 +96,11 @@ public class GrantApiKeyRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private GrantApiKeyRequest.Grant clone(GrantApiKeyRequest.Grant grant) {
|
||||
switch (grant.getGrantType()) {
|
||||
case "password":
|
||||
return GrantApiKeyRequest.Grant.passwordGrant(grant.getUsername(), grant.getPassword());
|
||||
case "access_token":
|
||||
return GrantApiKeyRequest.Grant.accessTokenGrant(grant.getAccessToken());
|
||||
default:
|
||||
throw new IllegalArgumentException("Cannot clone grant: " + Strings.toString(grant));
|
||||
}
|
||||
return switch (grant.getGrantType()) {
|
||||
case "password" -> GrantApiKeyRequest.Grant.passwordGrant(grant.getUsername(), grant.getPassword());
|
||||
case "access_token" -> GrantApiKeyRequest.Grant.accessTokenGrant(grant.getAccessToken());
|
||||
default -> throw new IllegalArgumentException("Cannot clone grant: " + Strings.toString(grant));
|
||||
};
|
||||
}
|
||||
|
||||
private CreateApiKeyRequest clone(CreateApiKeyRequest apiKeyRequest) {
|
||||
|
@@ -117,58 +114,53 @@ public class GrantApiKeyRequestTests extends ESTestCase {
}

private static GrantApiKeyRequest mutateTestItem(GrantApiKeyRequest original) {
switch (randomIntBetween(0, 3)) {
case 0:
return new GrantApiKeyRequest(
original.getGrant().getGrantType().equals("password")
? GrantApiKeyRequest.Grant.accessTokenGrant(randomAlphaOfLength(24))
: GrantApiKeyRequest.Grant.passwordGrant(randomAlphaOfLength(8), randomAlphaOfLengthBetween(6, 12).toCharArray()),
original.getApiKeyRequest()
);
case 1:
return new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
randomAlphaOfLengthBetween(10, 15),
original.getApiKeyRequest().getRoles(),
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
case 2:
return new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
List.of(), // No role limits
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
case 3:
return new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
original.getApiKeyRequest().getRoles(),
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
randomValueOtherThan(original.getApiKeyRequest().getMetadata(), CreateApiKeyRequestTests::randomMetadata)
)
);
default:
return new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
original.getApiKeyRequest().getRoles(),
TimeValue.timeValueMinutes(randomIntBetween(10, 120)),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
}
return switch (randomIntBetween(0, 3)) {
case 0 -> new GrantApiKeyRequest(
original.getGrant().getGrantType().equals("password")
? GrantApiKeyRequest.Grant.accessTokenGrant(randomAlphaOfLength(24))
: GrantApiKeyRequest.Grant.passwordGrant(randomAlphaOfLength(8), randomAlphaOfLengthBetween(6, 12).toCharArray()),
original.getApiKeyRequest()
);
case 1 -> new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
randomAlphaOfLengthBetween(10, 15),
original.getApiKeyRequest().getRoles(),
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
case 2 -> new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
List.of(), // No role limits
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
case 3 -> new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
original.getApiKeyRequest().getRoles(),
original.getApiKeyRequest().getExpiration(),
original.getApiKeyRequest().getRefreshPolicy(),
randomValueOtherThan(original.getApiKeyRequest().getMetadata(), CreateApiKeyRequestTests::randomMetadata)
)
);
default -> new GrantApiKeyRequest(
original.getGrant(),
new CreateApiKeyRequest(
original.getApiKeyRequest().getName(),
original.getApiKeyRequest().getRoles(),
TimeValue.timeValueMinutes(randomIntBetween(10, 120)),
original.getApiKeyRequest().getRefreshPolicy(),
original.getApiKeyRequest().getMetadata()
)
);
};
}
}
@@ -114,27 +114,23 @@ public class InvalidateApiKeyResponseTests extends ESTestCase {
}

private static InvalidateApiKeyResponse mutateTestItem(InvalidateApiKeyResponse original) {
switch (randomIntBetween(0, 2)) {
case 0:
return new InvalidateApiKeyResponse(
Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
original.getPreviouslyInvalidatedApiKeys(),
original.getErrors()
);
case 1:
return new InvalidateApiKeyResponse(original.getInvalidatedApiKeys(), Collections.emptyList(), original.getErrors());
case 2:
return new InvalidateApiKeyResponse(
original.getInvalidatedApiKeys(),
original.getPreviouslyInvalidatedApiKeys(),
Collections.emptyList()
);
default:
return new InvalidateApiKeyResponse(
Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
original.getPreviouslyInvalidatedApiKeys(),
original.getErrors()
);
}
return switch (randomIntBetween(0, 2)) {
case 0 -> new InvalidateApiKeyResponse(
Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
original.getPreviouslyInvalidatedApiKeys(),
original.getErrors()
);
case 1 -> new InvalidateApiKeyResponse(original.getInvalidatedApiKeys(), Collections.emptyList(), original.getErrors());
case 2 -> new InvalidateApiKeyResponse(
original.getInvalidatedApiKeys(),
original.getPreviouslyInvalidatedApiKeys(),
Collections.emptyList()
);
default -> new InvalidateApiKeyResponse(
Arrays.asList(randomArray(2, 5, String[]::new, () -> randomAlphaOfLength(5))),
original.getPreviouslyInvalidatedApiKeys(),
original.getErrors()
);
};
}
}
@@ -72,18 +72,13 @@ public class InvalidateTokenRequestTests extends ESTestCase {
: InvalidateTokenRequest.refreshToken(token);
final EqualsHashCodeTestUtils.MutateFunction<InvalidateTokenRequest> mutate = r -> {
int randomCase = randomIntBetween(1, 4);
switch (randomCase) {
case 1:
return InvalidateTokenRequest.refreshToken(randomAlphaOfLength(5));
case 2:
return InvalidateTokenRequest.accessToken(randomAlphaOfLength(5));
case 3:
return InvalidateTokenRequest.realmTokens(randomAlphaOfLength(5));
case 4:
return InvalidateTokenRequest.userTokens(randomAlphaOfLength(5));
default:
return new InvalidateTokenRequest(null, null, randomAlphaOfLength(5), randomAlphaOfLength(5));
}
return switch (randomCase) {
case 1 -> InvalidateTokenRequest.refreshToken(randomAlphaOfLength(5));
case 2 -> InvalidateTokenRequest.accessToken(randomAlphaOfLength(5));
case 3 -> InvalidateTokenRequest.realmTokens(randomAlphaOfLength(5));
case 4 -> InvalidateTokenRequest.userTokens(randomAlphaOfLength(5));
default -> new InvalidateTokenRequest(null, null, randomAlphaOfLength(5), randomAlphaOfLength(5));
};
};
EqualsHashCodeTestUtils.checkEqualsAndHashCode(
request,
@@ -63,29 +63,25 @@ public class KibanaEnrollmentResponseTests extends ESTestCase {
}

private static KibanaEnrollmentResponse mutateTestItem(KibanaEnrollmentResponse original) {
switch (randomIntBetween(0, 3)) {
case 0:
return new KibanaEnrollmentResponse(
randomAlphaOfLengthBetween(14, 20),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
randomAlphaOfLength(52)
);
case 1:
return new KibanaEnrollmentResponse(
original.getTokenName(),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
randomAlphaOfLength(52)
);
case 2:
return new KibanaEnrollmentResponse(randomAlphaOfLengthBetween(14, 20), original.getTokenValue(), randomAlphaOfLength(52));
case 3:
return new KibanaEnrollmentResponse(
randomAlphaOfLengthBetween(14, 20),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
original.getHttpCa()
);
}
// we never reach here
return null;
return switch (randomIntBetween(0, 3)) {
case 0 -> new KibanaEnrollmentResponse(
randomAlphaOfLengthBetween(14, 20),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
randomAlphaOfLength(52)
);
case 1 -> new KibanaEnrollmentResponse(
original.getTokenName(),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
randomAlphaOfLength(52)
);
case 2 -> new KibanaEnrollmentResponse(randomAlphaOfLengthBetween(14, 20), original.getTokenValue(), randomAlphaOfLength(52));
case 3 -> new KibanaEnrollmentResponse(
randomAlphaOfLengthBetween(14, 20),
new SecureString(randomAlphaOfLengthBetween(71, 90).toCharArray()),
original.getHttpCa()
);
// we never reach here
default -> null;
};
}
}
@ -78,74 +78,61 @@ public class QueryApiKeyRequestTests extends ESTestCase {
|
|||
}
|
||||
|
||||
private QueryApiKeyRequest mutateInstance(QueryApiKeyRequest request) {
|
||||
switch (randomIntBetween(0, 5)) {
|
||||
case 0:
|
||||
return new QueryApiKeyRequest(
|
||||
randomValueOtherThan(request.getQueryBuilder(), QueryApiKeyRequestTests::randomQueryBuilder),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 1:
|
||||
return new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom() + 1,
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 2:
|
||||
return new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize() + 1,
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 3:
|
||||
return new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
randomValueOtherThan(request.getFieldSortBuilders(), QueryApiKeyRequestTests::randomFieldSortBuilders),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
default:
|
||||
return new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
randomValueOtherThan(request.getSearchAfterBuilder(), QueryApiKeyRequestTests::randomSearchAfterBuilder)
|
||||
);
|
||||
|
||||
}
|
||||
return switch (randomIntBetween(0, 5)) {
|
||||
case 0 -> new QueryApiKeyRequest(
|
||||
randomValueOtherThan(request.getQueryBuilder(), QueryApiKeyRequestTests::randomQueryBuilder),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 1 -> new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom() + 1,
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 2 -> new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize() + 1,
|
||||
request.getFieldSortBuilders(),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
case 3 -> new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
randomValueOtherThan(request.getFieldSortBuilders(), QueryApiKeyRequestTests::randomFieldSortBuilders),
|
||||
request.getSearchAfterBuilder()
|
||||
);
|
||||
default -> new QueryApiKeyRequest(
|
||||
request.getQueryBuilder(),
|
||||
request.getFrom(),
|
||||
request.getSize(),
|
||||
request.getFieldSortBuilders(),
|
||||
randomValueOtherThan(request.getSearchAfterBuilder(), QueryApiKeyRequestTests::randomSearchAfterBuilder)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
public static QueryBuilder randomQueryBuilder() {
|
||||
switch (randomIntBetween(0, 5)) {
|
||||
case 0:
|
||||
return QueryBuilders.matchAllQuery();
|
||||
case 1:
|
||||
return QueryBuilders.termQuery(
|
||||
randomAlphaOfLengthBetween(3, 8),
|
||||
randomFrom(randomAlphaOfLength(8), randomInt(), randomLong(), randomDouble(), randomFloat())
|
||||
);
|
||||
case 2:
|
||||
return QueryBuilders.idsQuery().addIds(randomArray(1, 5, String[]::new, () -> randomAlphaOfLength(20)));
|
||||
case 3:
|
||||
return QueryBuilders.prefixQuery(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8));
|
||||
case 4:
|
||||
return QueryBuilders.wildcardQuery(
|
||||
randomAlphaOfLengthBetween(3, 8),
|
||||
randomAlphaOfLengthBetween(0, 3) + "*" + randomAlphaOfLengthBetween(0, 3)
|
||||
);
|
||||
case 5:
|
||||
return QueryBuilders.rangeQuery(randomAlphaOfLengthBetween(3, 8)).from(randomNonNegativeLong()).to(randomNonNegativeLong());
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
return switch (randomIntBetween(0, 5)) {
|
||||
case 0 -> QueryBuilders.matchAllQuery();
|
||||
case 1 -> QueryBuilders.termQuery(
|
||||
randomAlphaOfLengthBetween(3, 8),
|
||||
randomFrom(randomAlphaOfLength(8), randomInt(), randomLong(), randomDouble(), randomFloat())
|
||||
);
|
||||
case 2 -> QueryBuilders.idsQuery().addIds(randomArray(1, 5, String[]::new, () -> randomAlphaOfLength(20)));
|
||||
case 3 -> QueryBuilders.prefixQuery(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8));
|
||||
case 4 -> QueryBuilders.wildcardQuery(
|
||||
randomAlphaOfLengthBetween(3, 8),
|
||||
randomAlphaOfLengthBetween(0, 3) + "*" + randomAlphaOfLengthBetween(0, 3)
|
||||
);
|
||||
case 5 -> QueryBuilders.rangeQuery(randomAlphaOfLengthBetween(3, 8)).from(randomNonNegativeLong()).to(randomNonNegativeLong());
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
|
||||
public static List<FieldSortBuilder> randomFieldSortBuilders() {
|
||||
|
|
|
@@ -61,19 +61,15 @@ public class GlobalOperationPrivilegeTests extends ESTestCase {
}

private static GlobalOperationPrivilege mutateTestItem(GlobalOperationPrivilege original) {
switch (randomIntBetween(0, 2)) {
case 0:
return new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
case 1:
return new GlobalOperationPrivilege(original.getCategory(), randomAlphaOfLength(5), original.getRaw());
case 2:
return new GlobalOperationPrivilege(
original.getCategory(),
original.getOperation(),
Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4))
);
default:
return new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
}
return switch (randomIntBetween(0, 2)) {
case 0 -> new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
case 1 -> new GlobalOperationPrivilege(original.getCategory(), randomAlphaOfLength(5), original.getRaw());
case 2 -> new GlobalOperationPrivilege(
original.getCategory(),
original.getOperation(),
Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4))
);
default -> new GlobalOperationPrivilege(randomAlphaOfLength(5), original.getOperation(), original.getRaw());
};
}
}
@@ -72,23 +72,13 @@ public class TransformConfigTests extends AbstractXContentTestCase<TransformConf
public static Map<String, Object> randomMetadata() {
return randomMap(0, 10, () -> {
String key = randomAlphaOfLengthBetween(1, 10);
Object value;
switch (randomIntBetween(0, 3)) {
case 0:
value = null;
break;
case 1:
value = randomLong();
break;
case 2:
value = randomAlphaOfLengthBetween(1, 10);
break;
case 3:
value = randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
break;
default:
throw new AssertionError();
}
Object value = switch (randomIntBetween(0, 3)) {
case 0 -> null;
case 1 -> randomLong();
case 2 -> randomAlphaOfLengthBetween(1, 10);
case 3 -> randomMap(0, 10, () -> Tuple.tuple(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10)));
default -> throw new AssertionError();
};
return Tuple.tuple(key, value);
});
}
@@ -61,17 +61,13 @@ public class AggregationConfigTests extends AbstractXContentTestCase<Aggregation

private static AggregationBuilder getRandomSupportedAggregation() {
final int numberOfSupportedAggs = 4;
switch (randomIntBetween(1, numberOfSupportedAggs)) {
case 1:
return AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 2:
return AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 3:
return AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 4:
return AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
}
return switch (randomIntBetween(1, numberOfSupportedAggs)) {
case 1 -> AggregationBuilders.avg(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 2 -> AggregationBuilders.min(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 3 -> AggregationBuilders.max(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
case 4 -> AggregationBuilders.sum(randomAlphaOfLengthBetween(1, 10)).field(randomAlphaOfLengthBetween(1, 10));
default -> null;
};

return null;
}
}
@@ -34,24 +34,13 @@ public class GroupConfigTests extends AbstractXContentTestCase<GroupConfig> {
for (int i = 0; i < randomIntBetween(1, 4); ++i) {
String targetFieldName = randomAlphaOfLengthBetween(1, 20);
if (names.add(targetFieldName)) {
SingleGroupSource groupBy = null;
SingleGroupSource.Type type = randomFrom(SingleGroupSource.Type.values());
switch (type) {
case TERMS:
groupBy = TermsGroupSourceTests.randomTermsGroupSource();
break;
case HISTOGRAM:
groupBy = HistogramGroupSourceTests.randomHistogramGroupSource();
break;
case DATE_HISTOGRAM:
groupBy = DateHistogramGroupSourceTests.randomDateHistogramGroupSource();
break;
case GEOTILE_GRID:
groupBy = GeoTileGroupSourceTests.randomGeoTileGroupSource();
break;
default:
fail("unknown group source type, please implement tests and add support here");
}
SingleGroupSource groupBy = switch (type) {
case TERMS -> TermsGroupSourceTests.randomTermsGroupSource();
case HISTOGRAM -> HistogramGroupSourceTests.randomHistogramGroupSource();
case DATE_HISTOGRAM -> DateHistogramGroupSourceTests.randomDateHistogramGroupSource();
case GEOTILE_GRID -> GeoTileGroupSourceTests.randomGeoTileGroupSource();
};
groups.put(targetFieldName, groupBy);
}
}
@@ -264,15 +264,12 @@ public class InstallPluginAction implements Closeable {

private static void handleInstallXPack(final Build.Flavor flavor) throws UserException {
switch (flavor) {
case DEFAULT:
throw new UserException(ExitCodes.CONFIG, "this distribution of Elasticsearch contains X-Pack by default");
case OSS:
throw new UserException(
ExitCodes.CONFIG,
"X-Pack is not available with the oss distribution; to use X-Pack features use the default distribution"
);
case UNKNOWN:
throw new IllegalStateException("your distribution is broken");
case DEFAULT -> throw new UserException(ExitCodes.CONFIG, "this distribution of Elasticsearch contains X-Pack by default");
case OSS -> throw new UserException(
ExitCodes.CONFIG,
"X-Pack is not available with the oss distribution; to use X-Pack features use the default distribution"
);
case UNKNOWN -> throw new IllegalStateException("your distribution is broken");
}
}
@@ -118,16 +118,10 @@ public final class Booleans {
if (value == null) {
return defaultValue;
}
switch (value) {
case "false":
case "0":
case "off":
case "no":
return false;

default:
return true;
}
return switch (value) {
case "false", "0", "off", "no" -> false;
default -> true;
};
}

/**
@@ -318,24 +318,15 @@ public class TimeValue implements Comparable<TimeValue> {
if (duration < 0) {
return Long.toString(duration);
}
switch (timeUnit) {
case NANOSECONDS:
return duration + "nanos";
case MICROSECONDS:
return duration + "micros";
case MILLISECONDS:
return duration + "ms";
case SECONDS:
return duration + "s";
case MINUTES:
return duration + "m";
case HOURS:
return duration + "h";
case DAYS:
return duration + "d";
default:
throw new IllegalArgumentException("unknown time unit: " + timeUnit.name());
}
return switch (timeUnit) {
case NANOSECONDS -> duration + "nanos";
case MICROSECONDS -> duration + "micros";
case MILLISECONDS -> duration + "ms";
case SECONDS -> duration + "s";
case MINUTES -> duration + "m";
case HOURS -> duration + "h";
case DAYS -> duration + "d";
};
}

public static TimeValue parseTimeValue(String sValue, String settingName) {
@@ -60,22 +60,13 @@ final class DissectMatch {
return;
}
switch (key.getModifier()) {
case NONE:
simpleResults.put(key.getName(), value);
break;
case APPEND:
appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator)).addValue(value, implicitAppendOrder++);
break;
case APPEND_WITH_ORDER:
appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
.addValue(value, key.getAppendPosition());
break;
case FIELD_NAME:
referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
break;
case FIELD_VALUE:
referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
break;
case NONE -> simpleResults.put(key.getName(), value);
case APPEND -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
.addValue(value, implicitAppendOrder++);
case APPEND_WITH_ORDER -> appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator))
.addValue(value, key.getAppendPosition());
case FIELD_NAME -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
case FIELD_VALUE -> referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
}
}
@@ -506,17 +506,13 @@ public class WellKnownText {
}

private static String tokenString(StreamTokenizer stream) {
switch (stream.ttype) {
case StreamTokenizer.TT_WORD:
return stream.sval;
case StreamTokenizer.TT_EOF:
return EOF;
case StreamTokenizer.TT_EOL:
return EOL;
case StreamTokenizer.TT_NUMBER:
return NUMBER;
}
return "'" + (char) stream.ttype + "'";
return switch (stream.ttype) {
case StreamTokenizer.TT_WORD -> stream.sval;
case StreamTokenizer.TT_EOF -> EOF;
case StreamTokenizer.TT_EOL -> EOL;
case StreamTokenizer.TT_NUMBER -> NUMBER;
default -> "'" + (char) stream.ttype + "'";
};
}

private static boolean isNumberNext(StreamTokenizer stream) throws IOException {
@@ -58,22 +58,15 @@ enum GrokCaptureType {
abstract <T> T nativeExtracter(int[] backRefs, NativeExtracterMap<T> map);

static GrokCaptureType fromString(String str) {
switch (str) {
case "string":
return STRING;
case "int":
return INTEGER;
case "long":
return LONG;
case "float":
return FLOAT;
case "double":
return DOUBLE;
case "boolean":
return BOOLEAN;
default:
return STRING;
}
return switch (str) {
case "string" -> STRING;
case "int" -> INTEGER;
case "long" -> LONG;
case "float" -> FLOAT;
case "double" -> DOUBLE;
case "boolean" -> BOOLEAN;
default -> STRING;
};
}

protected final GrokCaptureExtracter rawExtracter(int[] backRefs, Consumer<? super String> emit) {
@@ -74,27 +74,15 @@ enum LZ4SafeUtils {
int dec = 0;
assert dOff >= matchOff && dOff - matchOff < 8;
switch (dOff - matchOff) {
case 1:
matchOff -= 3;
break;
case 2:
matchOff -= 2;
break;
case 3:
case 1 -> matchOff -= 3;
case 2 -> matchOff -= 2;
case 3 -> {
matchOff -= 3;
dec = -1;
break;
case 5:
dec = 1;
break;
case 6:
dec = 2;
break;
case 7:
dec = 3;
break;
default:
break;
}
case 5 -> dec = 1;
case 6 -> dec = 2;
case 7 -> dec = 3;
}

copy4Bytes(dest, matchOff, dest, dOff);
@@ -268,17 +268,15 @@ public final class DerParser {
}
StringBuilder sb = new StringBuilder(64);
switch (value[0] / 40) {
case 0:
sb.append('0');
break;
case 1:
case 0 -> sb.append('0');
case 1 -> {
sb.append('1');
value[0] -= 40;
break;
default:
}
default -> {
sb.append('2');
value[0] -= 80;
break;
}
}
int oidPart = 0;
for (int i = 0; i < length; i++) {
@ -654,17 +654,14 @@ public final class PemUtils {
|
|||
DerParser.Asn1Object algSequence = parser.readAsn1Object();
|
||||
parser = algSequence.getParser();
|
||||
String oidString = parser.readAsn1Object().getOid();
|
||||
switch (oidString) {
|
||||
case "1.2.840.10040.4.1":
|
||||
return "DSA";
|
||||
case "1.2.840.113549.1.1.1":
|
||||
return "RSA";
|
||||
case "1.2.840.10045.2.1":
|
||||
return "EC";
|
||||
}
|
||||
throw new GeneralSecurityException(
|
||||
"Error parsing key algorithm identifier. Algorithm with OID [" + oidString + "] is not supported"
|
||||
);
|
||||
return switch (oidString) {
|
||||
case "1.2.840.10040.4.1" -> "DSA";
|
||||
case "1.2.840.113549.1.1.1" -> "RSA";
|
||||
case "1.2.840.10045.2.1" -> "EC";
|
||||
default -> throw new GeneralSecurityException(
|
||||
"Error parsing key algorithm identifier. Algorithm with OID [" + oidString + "] is not supported"
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
public static List<Certificate> readCertificates(Collection<Path> certPaths) throws CertificateException, IOException {
|
||||
|
@ -683,92 +680,55 @@ public final class PemUtils {
|
|||
}
|
||||
|
||||
private static String getAlgorithmNameFromOid(String oidString) throws GeneralSecurityException {
|
||||
switch (oidString) {
|
||||
case "1.2.840.10040.4.1":
|
||||
return "DSA";
|
||||
case "1.2.840.113549.1.1.1":
|
||||
return "RSA";
|
||||
case "1.2.840.10045.2.1":
|
||||
return "EC";
|
||||
case "1.3.14.3.2.7":
|
||||
return "DES-CBC";
|
||||
case "2.16.840.1.101.3.4.1.1":
|
||||
return "AES-128_ECB";
|
||||
case "2.16.840.1.101.3.4.1.2":
|
||||
return "AES-128_CBC";
|
||||
case "2.16.840.1.101.3.4.1.3":
|
||||
return "AES-128_OFB";
|
||||
case "2.16.840.1.101.3.4.1.4":
|
||||
return "AES-128_CFB";
|
||||
case "2.16.840.1.101.3.4.1.6":
|
||||
return "AES-128_GCM";
|
||||
case "2.16.840.1.101.3.4.1.21":
|
||||
return "AES-192_ECB";
|
||||
case "2.16.840.1.101.3.4.1.22":
|
||||
return "AES-192_CBC";
|
||||
case "2.16.840.1.101.3.4.1.23":
|
||||
return "AES-192_OFB";
|
||||
case "2.16.840.1.101.3.4.1.24":
|
||||
return "AES-192_CFB";
|
||||
case "2.16.840.1.101.3.4.1.26":
|
||||
return "AES-192_GCM";
|
||||
case "2.16.840.1.101.3.4.1.41":
|
||||
return "AES-256_ECB";
|
||||
case "2.16.840.1.101.3.4.1.42":
|
||||
return "AES-256_CBC";
|
||||
case "2.16.840.1.101.3.4.1.43":
|
||||
return "AES-256_OFB";
|
||||
case "2.16.840.1.101.3.4.1.44":
|
||||
return "AES-256_CFB";
|
||||
case "2.16.840.1.101.3.4.1.46":
|
||||
return "AES-256_GCM";
|
||||
case "2.16.840.1.101.3.4.1.5":
|
||||
return "AESWrap-128";
|
||||
case "2.16.840.1.101.3.4.1.25":
|
||||
return "AESWrap-192";
|
||||
case "2.16.840.1.101.3.4.1.45":
|
||||
return "AESWrap-256";
|
||||
}
|
||||
return null;
|
||||
return switch (oidString) {
|
||||
case "1.2.840.10040.4.1" -> "DSA";
|
||||
case "1.2.840.113549.1.1.1" -> "RSA";
|
||||
case "1.2.840.10045.2.1" -> "EC";
|
||||
case "1.3.14.3.2.7" -> "DES-CBC";
|
||||
case "2.16.840.1.101.3.4.1.1" -> "AES-128_ECB";
|
||||
case "2.16.840.1.101.3.4.1.2" -> "AES-128_CBC";
|
||||
case "2.16.840.1.101.3.4.1.3" -> "AES-128_OFB";
|
||||
case "2.16.840.1.101.3.4.1.4" -> "AES-128_CFB";
|
||||
case "2.16.840.1.101.3.4.1.6" -> "AES-128_GCM";
|
||||
case "2.16.840.1.101.3.4.1.21" -> "AES-192_ECB";
|
||||
case "2.16.840.1.101.3.4.1.22" -> "AES-192_CBC";
|
||||
case "2.16.840.1.101.3.4.1.23" -> "AES-192_OFB";
|
||||
case "2.16.840.1.101.3.4.1.24" -> "AES-192_CFB";
|
||||
case "2.16.840.1.101.3.4.1.26" -> "AES-192_GCM";
|
||||
case "2.16.840.1.101.3.4.1.41" -> "AES-256_ECB";
|
||||
case "2.16.840.1.101.3.4.1.42" -> "AES-256_CBC";
|
||||
case "2.16.840.1.101.3.4.1.43" -> "AES-256_OFB";
|
||||
case "2.16.840.1.101.3.4.1.44" -> "AES-256_CFB";
|
||||
case "2.16.840.1.101.3.4.1.46" -> "AES-256_GCM";
|
||||
case "2.16.840.1.101.3.4.1.5" -> "AESWrap-128";
|
||||
case "2.16.840.1.101.3.4.1.25" -> "AESWrap-192";
|
||||
case "2.16.840.1.101.3.4.1.45" -> "AESWrap-256";
|
||||
default -> null;
|
||||
};
|
||||
}
|
||||
|
||||
private static String getEcCurveNameFromOid(String oidString) throws GeneralSecurityException {
|
||||
switch (oidString) {
|
||||
return switch (oidString) {
|
||||
// see https://tools.ietf.org/html/rfc5480#section-2.1.1.1
|
||||
case "1.2.840.10045.3.1":
|
||||
return "secp192r1";
|
||||
case "1.3.132.0.1":
|
||||
return "sect163k1";
|
||||
case "1.3.132.0.15":
|
||||
return "sect163r2";
|
||||
case "1.3.132.0.33":
|
||||
return "secp224r1";
|
||||
case "1.3.132.0.26":
|
||||
return "sect233k1";
|
||||
case "1.3.132.0.27":
|
||||
return "sect233r1";
|
||||
case "1.2.840.10045.3.1.7":
|
||||
return "secp256r1";
|
||||
case "1.3.132.0.16":
|
||||
return "sect283k1";
|
||||
case "1.3.132.0.17":
|
||||
return "sect283r1";
|
||||
case "1.3.132.0.34":
|
||||
return "secp384r1";
|
||||
case "1.3.132.0.36":
|
||||
return "sect409k1";
|
||||
case "1.3.132.0.37":
|
||||
return "sect409r1";
|
||||
case "1.3.132.0.35":
|
||||
return "secp521r1";
|
||||
case "1.3.132.0.38":
|
||||
return "sect571k1";
|
||||
case "1.3.132.0.39":
|
||||
return "sect571r1";
|
||||
}
|
||||
throw new GeneralSecurityException(
|
||||
"Error parsing EC named curve identifier. Named curve with OID: " + oidString + " is not supported"
|
||||
);
|
||||
case "1.2.840.10045.3.1" -> "secp192r1";
|
||||
case "1.3.132.0.1" -> "sect163k1";
|
||||
case "1.3.132.0.15" -> "sect163r2";
|
||||
case "1.3.132.0.33" -> "secp224r1";
|
||||
case "1.3.132.0.26" -> "sect233k1";
|
||||
case "1.3.132.0.27" -> "sect233r1";
|
||||
case "1.2.840.10045.3.1.7" -> "secp256r1";
|
||||
case "1.3.132.0.16" -> "sect283k1";
|
||||
case "1.3.132.0.17" -> "sect283r1";
|
||||
case "1.3.132.0.34" -> "secp384r1";
|
||||
case "1.3.132.0.36" -> "sect409k1";
|
||||
case "1.3.132.0.37" -> "sect409r1";
|
||||
case "1.3.132.0.35" -> "secp521r1";
|
||||
case "1.3.132.0.38" -> "sect571k1";
|
||||
case "1.3.132.0.39" -> "sect571r1";
|
||||
default -> throw new GeneralSecurityException(
|
||||
"Error parsing EC named curve identifier. Named curve with OID: " + oidString + " is not supported"
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -91,54 +91,51 @@ public class SslConfigurationTests extends ESTestCase {
|
|||
orig.getCipherSuites(),
|
||||
orig.getSupportedProtocols()
|
||||
),
|
||||
orig -> {
|
||||
switch (randomIntBetween(1, 4)) {
|
||||
case 1:
|
||||
return new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
randomValueOtherThan(orig.getVerificationMode(), () -> randomFrom(SslVerificationMode.values())),
|
||||
orig.getClientAuth(),
|
||||
orig.getCipherSuites(),
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
case 2:
|
||||
return new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
randomValueOtherThan(orig.getClientAuth(), () -> randomFrom(SslClientAuthenticationMode.values())),
|
||||
orig.getCipherSuites(),
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
case 3:
|
||||
return new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
orig.getClientAuth(),
|
||||
DEFAULT_CIPHERS,
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
case 4:
|
||||
default:
|
||||
return new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
orig.getClientAuth(),
|
||||
orig.getCipherSuites(),
|
||||
Arrays.asList(VALID_PROTOCOLS)
|
||||
);
|
||||
}
|
||||
}
|
||||
this::mutateSslConfiguration
|
||||
);
|
||||
}
|
||||
|
||||
private SslConfiguration mutateSslConfiguration(SslConfiguration orig) {
|
||||
return switch (randomIntBetween(1, 4)) {
|
||||
case 1 -> new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
randomValueOtherThan(orig.getVerificationMode(), () -> randomFrom(SslVerificationMode.values())),
|
||||
orig.getClientAuth(),
|
||||
orig.getCipherSuites(),
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
case 2 -> new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
randomValueOtherThan(orig.getClientAuth(), () -> randomFrom(SslClientAuthenticationMode.values())),
|
||||
orig.getCipherSuites(),
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
case 3 -> new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
orig.getClientAuth(),
|
||||
DEFAULT_CIPHERS,
|
||||
orig.getSupportedProtocols()
|
||||
);
|
||||
default -> new SslConfiguration(
|
||||
true,
|
||||
orig.getTrustConfig(),
|
||||
orig.getKeyConfig(),
|
||||
orig.getVerificationMode(),
|
||||
orig.getClientAuth(),
|
||||
orig.getCipherSuites(),
|
||||
Arrays.asList(VALID_PROTOCOLS)
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
public void testDependentFiles() {
|
||||
final SslTrustConfig trustConfig = Mockito.mock(SslTrustConfig.class);
|
||||
final SslKeyConfig keyConfig = Mockito.mock(SslKeyConfig.class);
|
||||
|
|
|
@ -121,29 +121,16 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
|
|||
return (objectParser, field, location, parser, value, context) -> {
|
||||
XContentParser.Token t = parser.currentToken();
|
||||
switch (t) {
|
||||
case VALUE_STRING:
|
||||
consumer.accept(value, field, parser.text());
|
||||
break;
|
||||
case VALUE_NUMBER:
|
||||
consumer.accept(value, field, parser.numberValue());
|
||||
break;
|
||||
case VALUE_BOOLEAN:
|
||||
consumer.accept(value, field, parser.booleanValue());
|
||||
break;
|
||||
case VALUE_NULL:
|
||||
consumer.accept(value, field, null);
|
||||
break;
|
||||
case START_OBJECT:
|
||||
consumer.accept(value, field, parser.map());
|
||||
break;
|
||||
case START_ARRAY:
|
||||
consumer.accept(value, field, parser.list());
|
||||
break;
|
||||
default:
|
||||
throw new XContentParseException(
|
||||
parser.getTokenLocation(),
|
||||
"[" + objectParser.name + "] cannot parse field [" + field + "] with value type [" + t + "]"
|
||||
);
|
||||
case VALUE_STRING -> consumer.accept(value, field, parser.text());
|
||||
case VALUE_NUMBER -> consumer.accept(value, field, parser.numberValue());
|
||||
case VALUE_BOOLEAN -> consumer.accept(value, field, parser.booleanValue());
|
||||
case VALUE_NULL -> consumer.accept(value, field, null);
|
||||
case START_OBJECT -> consumer.accept(value, field, parser.map());
|
||||
case START_ARRAY -> consumer.accept(value, field, parser.list());
|
||||
default -> throw new XContentParseException(
|
||||
parser.getTokenLocation(),
|
||||
"[" + objectParser.name + "] cannot parse field [" + field + "] with value type [" + t + "]"
|
||||
);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -642,7 +629,7 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
|
|||
private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) {
|
||||
final XContentParser.Token token = parser.currentToken();
|
||||
switch (token) {
|
||||
case START_OBJECT:
|
||||
case START_OBJECT -> {
|
||||
parseValue(parser, fieldParser, currentFieldName, value, context);
|
||||
/*
|
||||
* Well behaving parsers should consume the entire object but
|
||||
|
@ -655,8 +642,8 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
|
|||
if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
|
||||
throwMustEndOn(currentFieldName, XContentParser.Token.END_OBJECT);
|
||||
}
|
||||
break;
|
||||
case START_ARRAY:
|
||||
}
|
||||
case START_ARRAY -> {
|
||||
parseArray(parser, fieldParser, currentFieldName, value, context);
|
||||
/*
|
||||
* Well behaving parsers should consume the entire array but
|
||||
|
@ -669,17 +656,15 @@ public final class ObjectParser<Value, Context> extends AbstractObjectParser<Val
|
|||
if (parser.currentToken() != XContentParser.Token.END_ARRAY) {
|
||||
throwMustEndOn(currentFieldName, XContentParser.Token.END_ARRAY);
|
||||
}
|
||||
break;
|
||||
case END_OBJECT:
|
||||
case END_ARRAY:
|
||||
case FIELD_NAME:
|
||||
throw throwUnexpectedToken(parser, token);
|
||||
case VALUE_STRING:
|
||||
case VALUE_NUMBER:
|
||||
case VALUE_BOOLEAN:
|
||||
case VALUE_EMBEDDED_OBJECT:
|
||||
case VALUE_NULL:
|
||||
parseValue(parser, fieldParser, currentFieldName, value, context);
|
||||
}
|
||||
case END_OBJECT, END_ARRAY, FIELD_NAME -> throw throwUnexpectedToken(parser, token);
|
||||
case VALUE_STRING, VALUE_NUMBER, VALUE_BOOLEAN, VALUE_EMBEDDED_OBJECT, VALUE_NULL -> parseValue(
|
||||
parser,
|
||||
fieldParser,
|
||||
currentFieldName,
|
||||
value,
|
||||
context
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -137,18 +137,10 @@ public interface XContentGenerator extends Closeable, Flushable {
|
|||
break;
|
||||
case VALUE_NUMBER:
|
||||
switch (parser.numberType()) {
|
||||
case INT:
|
||||
writeNumber(parser.intValue());
|
||||
break;
|
||||
case LONG:
|
||||
writeNumber(parser.longValue());
|
||||
break;
|
||||
case FLOAT:
|
||||
writeNumber(parser.floatValue());
|
||||
break;
|
||||
case DOUBLE:
|
||||
writeNumber(parser.doubleValue());
|
||||
break;
|
||||
case INT -> writeNumber(parser.intValue());
|
||||
case LONG -> writeNumber(parser.longValue());
|
||||
case FLOAT -> writeNumber(parser.floatValue());
|
||||
case DOUBLE -> writeNumber(parser.doubleValue());
|
||||
}
|
||||
break;
|
||||
case VALUE_BOOLEAN:
|
||||
|
|
|
@ -413,21 +413,21 @@ public class JsonXContentGenerator implements XContentGenerator {
|
|||
}
|
||||
|
||||
switch (token) {
|
||||
case START_ARRAY:
|
||||
case START_ARRAY -> {
|
||||
destination.writeStartArray();
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
copyCurrentStructure(destination, parser);
|
||||
}
|
||||
destination.writeEndArray();
|
||||
break;
|
||||
case START_OBJECT:
|
||||
}
|
||||
case START_OBJECT -> {
|
||||
destination.writeStartObject();
|
||||
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
|
||||
copyCurrentStructure(destination, parser);
|
||||
}
|
||||
destination.writeEndObject();
|
||||
break;
|
||||
default: // others are simple:
|
||||
}
|
||||
default -> // others are simple:
|
||||
destination.copyCurrentEvent(parser);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -189,21 +189,14 @@ public class JsonXContentParser extends AbstractXContentParser {
|
|||
}
|
||||
|
||||
private NumberType convertNumberType(JsonParser.NumberType numberType) {
|
||||
switch (numberType) {
|
||||
case INT:
|
||||
return NumberType.INT;
|
||||
case BIG_INTEGER:
|
||||
return NumberType.BIG_INTEGER;
|
||||
case LONG:
|
||||
return NumberType.LONG;
|
||||
case FLOAT:
|
||||
return NumberType.FLOAT;
|
||||
case DOUBLE:
|
||||
return NumberType.DOUBLE;
|
||||
case BIG_DECIMAL:
|
||||
return NumberType.BIG_DECIMAL;
|
||||
}
|
||||
throw new IllegalStateException("No matching token for number_type [" + numberType + "]");
|
||||
return switch (numberType) {
|
||||
case INT -> NumberType.INT;
|
||||
case BIG_INTEGER -> NumberType.BIG_INTEGER;
|
||||
case LONG -> NumberType.LONG;
|
||||
case FLOAT -> NumberType.FLOAT;
|
||||
case DOUBLE -> NumberType.DOUBLE;
|
||||
case BIG_DECIMAL -> NumberType.BIG_DECIMAL;
|
||||
};
|
||||
}
|
||||
|
||||
private Token convertToken(JsonToken token) {
|
||||
|
|
|
@ -81,14 +81,11 @@ public abstract class AbstractXContentParser implements XContentParser {
|
|||
|
||||
@Override
|
||||
public boolean isBooleanValue() throws IOException {
|
||||
switch (currentToken()) {
|
||||
case VALUE_BOOLEAN:
|
||||
return true;
|
||||
case VALUE_STRING:
|
||||
return Booleans.isBoolean(textCharacters(), textOffset(), textLength());
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
return switch (currentToken()) {
|
||||
case VALUE_BOOLEAN -> true;
|
||||
case VALUE_STRING -> Booleans.isBoolean(textCharacters(), textOffset(), textLength());
|
||||
default -> false;
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -138,10 +138,8 @@ public class MapXContentParserTests extends ESTestCase {
|
|||
assertEquals(parser.textOrNull(), mapParser.textOrNull());
|
||||
}
|
||||
switch (token) {
|
||||
case VALUE_STRING:
|
||||
assertEquals(parser.text(), mapParser.text());
|
||||
break;
|
||||
case VALUE_NUMBER:
|
||||
case VALUE_STRING -> assertEquals(parser.text(), mapParser.text());
|
||||
case VALUE_NUMBER -> {
|
||||
assertEquals(parser.numberType(), mapParser.numberType());
|
||||
assertEquals(parser.numberValue(), mapParser.numberValue());
|
||||
if (parser.numberType() == XContentParser.NumberType.LONG
|
||||
|
@ -156,16 +154,10 @@ public class MapXContentParserTests extends ESTestCase {
|
|||
} else {
|
||||
assertEquals(parser.doubleValue(), mapParser.doubleValue(), 0.000001);
|
||||
}
|
||||
break;
|
||||
case VALUE_BOOLEAN:
|
||||
assertEquals(parser.booleanValue(), mapParser.booleanValue());
|
||||
break;
|
||||
case VALUE_EMBEDDED_OBJECT:
|
||||
assertArrayEquals(parser.binaryValue(), mapParser.binaryValue());
|
||||
break;
|
||||
case VALUE_NULL:
|
||||
assertNull(mapParser.textOrNull());
|
||||
break;
|
||||
}
|
||||
case VALUE_BOOLEAN -> assertEquals(parser.booleanValue(), mapParser.booleanValue());
|
||||
case VALUE_EMBEDDED_OBJECT -> assertArrayEquals(parser.binaryValue(), mapParser.binaryValue());
|
||||
case VALUE_NULL -> assertNull(mapParser.textOrNull());
|
||||
}
|
||||
assertEquals(parser.currentName(), mapParser.currentName());
|
||||
assertEquals(parser.isClosed(), mapParser.isClosed());
|
||||
|
|
|
@ -69,20 +69,9 @@ public class XContentParserTests extends ESTestCase {
|
|||
assertEquals(value, number.floatValue(), 0.0f);
|
||||
|
||||
switch (xContentType) {
|
||||
case VND_CBOR:
|
||||
case VND_SMILE:
|
||||
case CBOR:
|
||||
case SMILE:
|
||||
assertThat(number, instanceOf(Float.class));
|
||||
break;
|
||||
case VND_JSON:
|
||||
case VND_YAML:
|
||||
case JSON:
|
||||
case YAML:
|
||||
assertThat(number, instanceOf(Double.class));
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError("unexpected x-content type [" + xContentType + "]");
|
||||
case VND_CBOR, VND_SMILE, CBOR, SMILE -> assertThat(number, instanceOf(Float.class));
|
||||
case VND_JSON, VND_YAML, JSON, YAML -> assertThat(number, instanceOf(Double.class));
|
||||
default -> throw new AssertionError("unexpected x-content type [" + xContentType + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -122,18 +122,12 @@ public abstract class AbstractXContentFilteringTestCase extends AbstractFilterin
|
|||
}
|
||||
assertThat(token1, equalTo(token2));
|
||||
switch (token1) {
|
||||
case FIELD_NAME:
|
||||
assertThat(jsonParser.currentName(), equalTo(testParser.currentName()));
|
||||
break;
|
||||
case VALUE_STRING:
|
||||
assertThat(jsonParser.text(), equalTo(testParser.text()));
|
||||
break;
|
||||
case VALUE_NUMBER:
|
||||
case FIELD_NAME -> assertThat(jsonParser.currentName(), equalTo(testParser.currentName()));
|
||||
case VALUE_STRING -> assertThat(jsonParser.text(), equalTo(testParser.text()));
|
||||
case VALUE_NUMBER -> {
|
||||
assertThat(jsonParser.numberType(), equalTo(testParser.numberType()));
|
||||
assertThat(jsonParser.numberValue(), equalTo(testParser.numberValue()));
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -203,24 +203,16 @@ public class InternalMatrixStats extends InternalAggregation implements MatrixSt
|
|||
if (results == null) {
|
||||
return emptyMap();
|
||||
}
|
||||
switch (element) {
|
||||
case "counts":
|
||||
return results.getFieldCounts();
|
||||
case "means":
|
||||
return results.getMeans();
|
||||
case "variances":
|
||||
return results.getVariances();
|
||||
case "skewness":
|
||||
return results.getSkewness();
|
||||
case "kurtosis":
|
||||
return results.getKurtosis();
|
||||
case "covariance":
|
||||
return results.getCovariances();
|
||||
case "correlation":
|
||||
return results.getCorrelations();
|
||||
default:
|
||||
throw new IllegalArgumentException("Found unknown path element [" + element + "] in [" + getName() + "]");
|
||||
}
|
||||
return switch (element) {
|
||||
case "counts" -> results.getFieldCounts();
|
||||
case "means" -> results.getMeans();
|
||||
case "variances" -> results.getVariances();
|
||||
case "skewness" -> results.getSkewness();
|
||||
case "kurtosis" -> results.getKurtosis();
|
||||
case "covariance" -> results.getCovariances();
|
||||
case "correlation" -> results.getCorrelations();
|
||||
default -> throw new IllegalArgumentException("Found unknown path element [" + element + "] in [" + getName() + "]");
|
||||
};
|
||||
} else {
|
||||
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
|
||||
}
|
||||
|
|
|
@ -47,23 +47,12 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory {
|
|||
tokenizeOnChars.add((int) parseEscapedChar(c));
|
||||
} else {
|
||||
switch (c) {
|
||||
case "letter":
|
||||
tokenizeOnLetter = true;
|
||||
break;
|
||||
case "digit":
|
||||
tokenizeOnDigit = true;
|
||||
break;
|
||||
case "whitespace":
|
||||
tokenizeOnSpace = true;
|
||||
break;
|
||||
case "punctuation":
|
||||
tokenizeOnPunctuation = true;
|
||||
break;
|
||||
case "symbol":
|
||||
tokenizeOnSymbol = true;
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Invalid escaped char in [" + c + "]");
|
||||
case "letter" -> tokenizeOnLetter = true;
|
||||
case "digit" -> tokenizeOnDigit = true;
|
||||
case "whitespace" -> tokenizeOnSpace = true;
|
||||
case "punctuation" -> tokenizeOnPunctuation = true;
|
||||
case "symbol" -> tokenizeOnSymbol = true;
|
||||
default -> throw new RuntimeException("Invalid escaped char in [" + c + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,14 +38,11 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {
|
|||
}
|
||||
|
||||
static int parseSide(String side) {
|
||||
switch (side) {
|
||||
case "front":
|
||||
return SIDE_FRONT;
|
||||
case "back":
|
||||
return SIDE_BACK;
|
||||
default:
|
||||
throw new IllegalArgumentException("invalid side: " + side);
|
||||
}
|
||||
return switch (side) {
|
||||
case "front" -> SIDE_FRONT;
|
||||
case "back" -> SIDE_BACK;
|
||||
default -> throw new IllegalArgumentException("invalid side: " + side);
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -75,29 +75,17 @@ public class MappingCharFilterFactory extends AbstractCharFilterFactory implemen
|
|||
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
c = s.charAt(readPos++);
|
||||
switch (c) {
|
||||
case '\\':
|
||||
c = '\\';
|
||||
break;
|
||||
case 'n':
|
||||
c = '\n';
|
||||
break;
|
||||
case 't':
|
||||
c = '\t';
|
||||
break;
|
||||
case 'r':
|
||||
c = '\r';
|
||||
break;
|
||||
case 'b':
|
||||
c = '\b';
|
||||
break;
|
||||
case 'f':
|
||||
c = '\f';
|
||||
break;
|
||||
case 'u':
|
||||
case '\\' -> c = '\\';
|
||||
case 'n' -> c = '\n';
|
||||
case 't' -> c = '\t';
|
||||
case 'r' -> c = '\r';
|
||||
case 'b' -> c = '\b';
|
||||
case 'f' -> c = '\f';
|
||||
case 'u' -> {
|
||||
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
|
||||
readPos += 4;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
out[writePos++] = c;
|
||||
|
|
|
@ -152,29 +152,17 @@ public class WordDelimiterTokenFilterFactory extends AbstractTokenFilterFactory
|
|||
if (readPos >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
c = s.charAt(readPos++);
|
||||
switch (c) {
|
||||
case '\\':
|
||||
c = '\\';
|
||||
break;
|
||||
case 'n':
|
||||
c = '\n';
|
||||
break;
|
||||
case 't':
|
||||
c = '\t';
|
||||
break;
|
||||
case 'r':
|
||||
c = '\r';
|
||||
break;
|
||||
case 'b':
|
||||
c = '\b';
|
||||
break;
|
||||
case 'f':
|
||||
c = '\f';
|
||||
break;
|
||||
case 'u':
|
||||
case '\\' -> c = '\\';
|
||||
case 'n' -> c = '\n';
|
||||
case 't' -> c = '\t';
|
||||
case 'r' -> c = '\r';
|
||||
case 'b' -> c = '\b';
|
||||
case 'f' -> c = '\f';
|
||||
case 'u' -> {
|
||||
if (readPos + 3 >= len) throw new RuntimeException("Invalid escaped char in [" + s + "]");
|
||||
c = (char) Integer.parseInt(s.substring(readPos, readPos + 4), 16);
|
||||
readPos += 4;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
out[writePos++] = c;
|
||||
|
|
|
@ -226,25 +226,21 @@ public final class CommunityIdProcessor extends AbstractProcessor {
|
|||
flow.protocol = Transport.fromObject(protocol);
|
||||
|
||||
switch (flow.protocol) {
|
||||
case Tcp:
|
||||
case Udp:
|
||||
case Sctp:
|
||||
case Tcp, Udp, Sctp -> {
|
||||
flow.sourcePort = parseIntFromObjectOrString(sourcePort.get(), "source port");
|
||||
if (flow.sourcePort < 1 || flow.sourcePort > 65535) {
|
||||
throw new IllegalArgumentException("invalid source port [" + sourcePort.get() + "]");
|
||||
}
|
||||
|
||||
flow.destinationPort = parseIntFromObjectOrString(destinationPort.get(), "destination port");
|
||||
if (flow.destinationPort < 1 || flow.destinationPort > 65535) {
|
||||
throw new IllegalArgumentException("invalid destination port [" + destinationPort.get() + "]");
|
||||
}
|
||||
break;
|
||||
case Icmp:
|
||||
case IcmpIpV6:
|
||||
}
|
||||
case Icmp, IcmpIpV6 -> {
|
||||
// tolerate missing or invalid ICMP types and codes
|
||||
flow.icmpType = parseIntFromObjectOrString(icmpType, "icmp type");
|
||||
flow.icmpCode = parseIntFromObjectOrString(icmpCode, "icmp code");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return flow;
|
||||
|
@ -441,30 +437,19 @@ public final class CommunityIdProcessor extends AbstractProcessor {
|
|||
}
|
||||
|
||||
public static Transport fromNumber(int transportNumber) {
|
||||
switch (transportNumber) {
|
||||
case 1:
|
||||
return Icmp;
|
||||
case 2:
|
||||
return Igmp;
|
||||
case 6:
|
||||
return Tcp;
|
||||
case 17:
|
||||
return Udp;
|
||||
case 47:
|
||||
return Gre;
|
||||
case 58:
|
||||
return IcmpIpV6;
|
||||
case 88:
|
||||
return Eigrp;
|
||||
case 89:
|
||||
return Ospf;
|
||||
case 103:
|
||||
return Pim;
|
||||
case 132:
|
||||
return Sctp;
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown transport protocol number [" + transportNumber + "]");
|
||||
}
|
||||
return switch (transportNumber) {
|
||||
case 1 -> Icmp;
|
||||
case 2 -> Igmp;
|
||||
case 6 -> Tcp;
|
||||
case 17 -> Udp;
|
||||
case 47 -> Gre;
|
||||
case 58 -> IcmpIpV6;
|
||||
case 88 -> Eigrp;
|
||||
case 89 -> Ospf;
|
||||
case 103 -> Pim;
|
||||
case 132 -> Sctp;
|
||||
default -> throw new IllegalArgumentException("unknown transport protocol number [" + transportNumber + "]");
|
||||
};
|
||||
}
|
||||
|
||||
public static Transport fromObject(Object o) {
|
||||
|
@ -557,55 +542,33 @@ public final class CommunityIdProcessor extends AbstractProcessor {
|
|||
}
|
||||
|
||||
public static IcmpType fromNumber(int type) {
|
||||
switch (type) {
|
||||
case 0:
|
||||
return EchoReply;
|
||||
case 8:
|
||||
return EchoRequest;
|
||||
case 9:
|
||||
return RouterAdvertisement;
|
||||
case 10:
|
||||
return RouterSolicitation;
|
||||
case 13:
|
||||
return TimestampRequest;
|
||||
case 14:
|
||||
return TimestampReply;
|
||||
case 15:
|
||||
return InfoRequest;
|
||||
case 16:
|
||||
return InfoReply;
|
||||
case 17:
|
||||
return AddressMaskRequest;
|
||||
case 18:
|
||||
return AddressMaskReply;
|
||||
case 128:
|
||||
return V6EchoRequest;
|
||||
case 129:
|
||||
return V6EchoReply;
|
||||
case 133:
|
||||
return V6RouterSolicitation;
|
||||
case 134:
|
||||
return V6RouterAdvertisement;
|
||||
case 135:
|
||||
return V6NeighborSolicitation;
|
||||
case 136:
|
||||
return V6NeighborAdvertisement;
|
||||
case 130:
|
||||
return V6MLDv1MulticastListenerQueryMessage;
|
||||
case 131:
|
||||
return V6MLDv1MulticastListenerReportMessage;
|
||||
case 139:
|
||||
return V6WhoAreYouRequest;
|
||||
case 140:
|
||||
return V6WhoAreYouReply;
|
||||
case 144:
|
||||
return V6HomeAddressDiscoveryRequest;
|
||||
case 145:
|
||||
return V6HomeAddressDiscoveryResponse;
|
||||
default:
|
||||
return switch (type) {
|
||||
case 0 -> EchoReply;
|
||||
case 8 -> EchoRequest;
|
||||
case 9 -> RouterAdvertisement;
|
||||
case 10 -> RouterSolicitation;
|
||||
case 13 -> TimestampRequest;
|
||||
case 14 -> TimestampReply;
|
||||
case 15 -> InfoRequest;
|
||||
case 16 -> InfoReply;
|
||||
case 17 -> AddressMaskRequest;
|
||||
case 18 -> AddressMaskReply;
|
||||
case 128 -> V6EchoRequest;
|
||||
case 129 -> V6EchoReply;
|
||||
case 133 -> V6RouterSolicitation;
|
||||
case 134 -> V6RouterAdvertisement;
|
||||
case 135 -> V6NeighborSolicitation;
|
||||
case 136 -> V6NeighborAdvertisement;
|
||||
case 130 -> V6MLDv1MulticastListenerQueryMessage;
|
||||
case 131 -> V6MLDv1MulticastListenerReportMessage;
|
||||
case 139 -> V6WhoAreYouRequest;
|
||||
case 140 -> V6WhoAreYouReply;
|
||||
case 144 -> V6HomeAddressDiscoveryRequest;
|
||||
case 145 -> V6HomeAddressDiscoveryResponse;
|
||||
default ->
|
||||
// don't fail if the type is unknown
|
||||
return EchoReply;
|
||||
}
|
||||
EchoReply;
|
||||
};
|
||||
}
|
||||
|
||||
public static Integer codeEquivalent(int icmpType, boolean isIpV6) {
|
||||
|
|
|
@@ -75,14 +75,9 @@ final class CsvParser {

// we've reached end of string, we need to handle last field
switch (state) {
case UNQUOTED:
setField(length);
break;
case QUOTED_END:
setField(length - 1);
break;
case QUOTED:
throw new IllegalArgumentException("Unmatched quote");
case UNQUOTED -> setField(length);
case QUOTED_END -> setField(length - 1);
case QUOTED -> throw new IllegalArgumentException("Unmatched quote");
}
}
@@ -118,17 +118,12 @@ enum DateFormat {
abstract Function<String, ZonedDateTime> getFunction(String format, ZoneId timezone, Locale locale);

static DateFormat fromString(String format) {
switch (format) {
case "ISO8601":
return Iso8601;
case "UNIX":
return Unix;
case "UNIX_MS":
return UnixMs;
case "TAI64N":
return Tai64n;
default:
return Java;
}
return switch (format) {
case "ISO8601" -> Iso8601;
case "UNIX" -> Unix;
case "UNIX_MS" -> UnixMs;
case "TAI64N" -> Tai64n;
default -> Java;
};
}
}
@ -165,29 +165,18 @@ public class NetworkDirectionProcessor extends AbstractProcessor {

private boolean inNetwork(String ip, String network) {
InetAddress address = InetAddresses.forString(ip);
switch (network) {
case LOOPBACK_NAMED_NETWORK:
return isLoopback(address);
case GLOBAL_UNICAST_NAMED_NETWORK:
case UNICAST_NAMED_NETWORK:
return isUnicast(address);
case LINK_LOCAL_UNICAST_NAMED_NETWORK:
return isLinkLocalUnicast(address);
case INTERFACE_LOCAL_NAMED_NETWORK:
return isInterfaceLocalMulticast(address);
case LINK_LOCAL_MULTICAST_NAMED_NETWORK:
return isLinkLocalMulticast(address);
case MULTICAST_NAMED_NETWORK:
return isMulticast(address);
case UNSPECIFIED_NAMED_NETWORK:
return isUnspecified(address);
case PRIVATE_NAMED_NETWORK:
return isPrivate(ip);
case PUBLIC_NAMED_NETWORK:
return isPublic(ip);
default:
return CIDRUtils.isInRange(ip, network);
}
return switch (network) {
case LOOPBACK_NAMED_NETWORK -> isLoopback(address);
case GLOBAL_UNICAST_NAMED_NETWORK, UNICAST_NAMED_NETWORK -> isUnicast(address);
case LINK_LOCAL_UNICAST_NAMED_NETWORK -> isLinkLocalUnicast(address);
case INTERFACE_LOCAL_NAMED_NETWORK -> isInterfaceLocalMulticast(address);
case LINK_LOCAL_MULTICAST_NAMED_NETWORK -> isLinkLocalMulticast(address);
case MULTICAST_NAMED_NETWORK -> isMulticast(address);
case UNSPECIFIED_NAMED_NETWORK -> isUnspecified(address);
case PRIVATE_NAMED_NETWORK -> isPrivate(ip);
case PUBLIC_NAMED_NETWORK -> isPublic(ip);
default -> CIDRUtils.isInRange(ip, network);
};
}

private boolean isLoopback(InetAddress ip) {
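
The NetworkDirectionProcessor hunk also collapses two fall-through labels (GLOBAL_UNICAST_NAMED_NETWORK and UNICAST_NAMED_NETWORK) into one comma-separated case. A minimal sketch of that multi-label form, with made-up network names and a stand-in for the CIDR check, might look like:

// Sketch of grouping several labels on one arrow case, replacing old-style fall-through.
// The network names and the default branch are illustrative, not the processor's real logic.
public class MultiLabelSketch {
    static boolean inNetwork(String network) {
        return switch (network) {
            case "unicast", "global_unicast" -> true; // one branch handles both labels
            case "loopback", "multicast" -> false;
            default -> network.startsWith("10.");     // stand-in for a CIDR range check
        };
    }

    public static void main(String[] args) {
        System.out.println(inNetwork("global_unicast")); // true
        System.out.println(inNetwork("10.0.0.0/8"));     // true
    }
}
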
@ -383,23 +383,22 @@ public class ConvertProcessorTests extends ESTestCase {
Object fieldValue;
String expectedFieldValue;
switch (randomIntBetween(0, 2)) {
case 0:
case 0 -> {
float randomFloat = randomFloat();
fieldValue = randomFloat;
expectedFieldValue = Float.toString(randomFloat);
break;
case 1:
}
case 1 -> {
int randomInt = randomInt();
fieldValue = randomInt;
expectedFieldValue = Integer.toString(randomInt);
break;
case 2:
}
case 2 -> {
boolean randomBoolean = randomBoolean();
fieldValue = randomBoolean;
expectedFieldValue = Boolean.toString(randomBoolean);
break;
default:
throw new UnsupportedOperationException();
}
default -> throw new UnsupportedOperationException();
}
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
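
The test above keeps a switch statement and assigns inside each block. When the same shape is written as a switch expression instead, a multi-statement branch produces its result with yield. The sketch below assumes that variant; the random values and variable names are illustrative only, not taken from the test.

import java.util.Random;

// Sketch of an arrow case with a block body. In a switch *expression* a multi-statement
// branch yields its value; the test code above keeps a switch statement and assigns
// inside the block instead. All values and names here are made up.
public class YieldSketch {
    public static void main(String[] args) {
        Random random = new Random();
        String expected = switch (random.nextInt(3)) {
            case 0 -> {
                float f = random.nextFloat();
                yield Float.toString(f); // yield returns the block's value
            }
            case 1 -> {
                int i = random.nextInt();
                yield Integer.toString(i);
            }
            case 2 -> Boolean.toString(random.nextBoolean());
            default -> throw new UnsupportedOperationException();
        };
        System.out.println(expected);
    }
}
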
@ -417,33 +416,32 @@ public class ConvertProcessorTests extends ESTestCase {
|
|||
Object randomValue;
|
||||
String randomValueString;
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
case 0 -> {
|
||||
float randomFloat = randomFloat();
|
||||
randomValue = randomFloat;
|
||||
randomValueString = Float.toString(randomFloat);
|
||||
break;
|
||||
case 1:
|
||||
}
|
||||
case 1 -> {
|
||||
int randomInt = randomInt();
|
||||
randomValue = randomInt;
|
||||
randomValueString = Integer.toString(randomInt);
|
||||
break;
|
||||
case 2:
|
||||
}
|
||||
case 2 -> {
|
||||
boolean randomBoolean = randomBoolean();
|
||||
randomValue = randomBoolean;
|
||||
randomValueString = Boolean.toString(randomBoolean);
|
||||
break;
|
||||
case 3:
|
||||
}
|
||||
case 3 -> {
|
||||
long randomLong = randomLong();
|
||||
randomValue = randomLong;
|
||||
randomValueString = Long.toString(randomLong);
|
||||
break;
|
||||
case 4:
|
||||
}
|
||||
case 4 -> {
|
||||
double randomDouble = randomDouble();
|
||||
randomValue = randomDouble;
|
||||
randomValueString = Double.toString(randomDouble);
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
default -> throw new UnsupportedOperationException();
|
||||
}
|
||||
fieldValue.add(randomValue);
|
||||
expectedList.add(randomValueString);
|
||||
|
@ -501,20 +499,19 @@ public class ConvertProcessorTests extends ESTestCase {
|
|||
public void testAutoConvertNotString() throws Exception {
|
||||
Object randomValue;
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
case 0 -> {
|
||||
float randomFloat = randomFloat();
|
||||
randomValue = randomFloat;
|
||||
break;
|
||||
case 1:
|
||||
}
|
||||
case 1 -> {
|
||||
int randomInt = randomInt();
|
||||
randomValue = randomInt;
|
||||
break;
|
||||
case 2:
|
||||
}
|
||||
case 2 -> {
|
||||
boolean randomBoolean = randomBoolean();
|
||||
randomValue = randomBoolean;
|
||||
break;
|
||||
default:
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
default -> throw new UnsupportedOperationException();
|
||||
}
|
||||
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("field", randomValue));
|
||||
Processor processor = new ConvertProcessor(randomAlphaOfLength(10), null, "field", "field", Type.AUTO, false);
|
||||
|
|
|
@ -224,28 +224,26 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
|||
Map<String, Object> geoData = new HashMap<>();
|
||||
for (Property property : this.properties) {
|
||||
switch (property) {
|
||||
case IP:
|
||||
geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||
break;
|
||||
case COUNTRY_ISO_CODE:
|
||||
case IP -> geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||
case COUNTRY_ISO_CODE -> {
|
||||
String countryIsoCode = country.getIsoCode();
|
||||
if (countryIsoCode != null) {
|
||||
geoData.put("country_iso_code", countryIsoCode);
|
||||
}
|
||||
break;
|
||||
case COUNTRY_NAME:
|
||||
}
|
||||
case COUNTRY_NAME -> {
|
||||
String countryName = country.getName();
|
||||
if (countryName != null) {
|
||||
geoData.put("country_name", countryName);
|
||||
}
|
||||
break;
|
||||
case CONTINENT_NAME:
|
||||
}
|
||||
case CONTINENT_NAME -> {
|
||||
String continentName = continent.getName();
|
||||
if (continentName != null) {
|
||||
geoData.put("continent_name", continentName);
|
||||
}
|
||||
break;
|
||||
case REGION_ISO_CODE:
|
||||
}
|
||||
case REGION_ISO_CODE -> {
|
||||
// ISO 3166-2 code for country subdivisions.
|
||||
// See iso.org/iso-3166-country-codes.html
|
||||
String countryIso = country.getIsoCode();
|
||||
|
@ -254,26 +252,26 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
|||
String regionIsoCode = countryIso + "-" + subdivisionIso;
|
||||
geoData.put("region_iso_code", regionIsoCode);
|
||||
}
|
||||
break;
|
||||
case REGION_NAME:
|
||||
}
|
||||
case REGION_NAME -> {
|
||||
String subdivisionName = subdivision.getName();
|
||||
if (subdivisionName != null) {
|
||||
geoData.put("region_name", subdivisionName);
|
||||
}
|
||||
break;
|
||||
case CITY_NAME:
|
||||
}
|
||||
case CITY_NAME -> {
|
||||
String cityName = city.getName();
|
||||
if (cityName != null) {
|
||||
geoData.put("city_name", cityName);
|
||||
}
|
||||
break;
|
||||
case TIMEZONE:
|
||||
}
|
||||
case TIMEZONE -> {
|
||||
String locationTimeZone = location.getTimeZone();
|
||||
if (locationTimeZone != null) {
|
||||
geoData.put("timezone", locationTimeZone);
|
||||
}
|
||||
break;
|
||||
case LOCATION:
|
||||
}
|
||||
case LOCATION -> {
|
||||
Double latitude = location.getLatitude();
|
||||
Double longitude = location.getLongitude();
|
||||
if (latitude != null && longitude != null) {
|
||||
|
@ -282,7 +280,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
|||
locationObject.put("lon", longitude);
|
||||
geoData.put("location", locationObject);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return geoData;
|
||||
|
@ -299,27 +297,25 @@ public final class GeoIpProcessor extends AbstractProcessor {
|
|||
Map<String, Object> geoData = new HashMap<>();
|
||||
for (Property property : this.properties) {
|
||||
switch (property) {
|
||||
case IP:
|
||||
geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||
break;
|
||||
case COUNTRY_ISO_CODE:
|
||||
case IP -> geoData.put("ip", NetworkAddress.format(ipAddress));
|
||||
case COUNTRY_ISO_CODE -> {
|
||||
String countryIsoCode = country.getIsoCode();
|
||||
if (countryIsoCode != null) {
|
||||
geoData.put("country_iso_code", countryIsoCode);
|
||||
}
|
||||
break;
|
||||
case COUNTRY_NAME:
|
||||
}
|
||||
case COUNTRY_NAME -> {
|
||||
String countryName = country.getName();
|
||||
if (countryName != null) {
|
||||
geoData.put("country_name", countryName);
|
||||
}
|
||||
break;
|
||||
case CONTINENT_NAME:
|
||||
}
|
||||
case CONTINENT_NAME -> {
|
||||
String continentName = continent.getName();
|
||||
if (continentName != null) {
|
||||
geoData.put("continent_name", continentName);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return geoData;
|
||||
|
|
|
@ -45,52 +45,36 @@ final class DateField {
|
|||
static final String GET_SECONDS_METHOD = "getSeconds";
|
||||
|
||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||
switch (variable) {
|
||||
case VALUE_VARIABLE:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case EMPTY_VARIABLE:
|
||||
return new EmptyMemberValueSource(fieldData);
|
||||
case LENGTH_VARIABLE:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
default:
|
||||
throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for date field [" + fieldName + "].");
|
||||
}
|
||||
return switch (variable) {
|
||||
case VALUE_VARIABLE -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case EMPTY_VARIABLE -> new EmptyMemberValueSource(fieldData);
|
||||
case LENGTH_VARIABLE -> new CountMethodValueSource(fieldData);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for date field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||
switch (method) {
|
||||
case GETVALUE_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case ISEMPTY_METHOD:
|
||||
return new EmptyMemberValueSource(fieldData);
|
||||
case SIZE_METHOD:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
case MINIMUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case MAXIMUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||
case AVERAGE_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||
case MEDIAN_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||
case SUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||
case COUNT_METHOD:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
case GET_YEAR_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR);
|
||||
case GET_MONTH_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH);
|
||||
case GET_DAY_OF_MONTH_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH);
|
||||
case GET_HOUR_OF_DAY_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY);
|
||||
case GET_MINUTES_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE);
|
||||
case GET_SECONDS_METHOD:
|
||||
return new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND);
|
||||
default:
|
||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for date field [" + fieldName + "].");
|
||||
}
|
||||
return switch (method) {
|
||||
case GETVALUE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case ISEMPTY_METHOD -> new EmptyMemberValueSource(fieldData);
|
||||
case SIZE_METHOD -> new CountMethodValueSource(fieldData);
|
||||
case MINIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case MAXIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||
case AVERAGE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||
case MEDIAN_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||
case SUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||
case COUNT_METHOD -> new CountMethodValueSource(fieldData);
|
||||
case GET_YEAR_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.YEAR);
|
||||
case GET_MONTH_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MONTH);
|
||||
case GET_DAY_OF_MONTH_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.DAY_OF_MONTH);
|
||||
case GET_HOUR_OF_DAY_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.HOUR_OF_DAY);
|
||||
case GET_MINUTES_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.MINUTE);
|
||||
case GET_SECONDS_METHOD -> new DateMethodValueSource(fieldData, MultiValueMode.MIN, method, Calendar.SECOND);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member method [" + method + "] does not exist for date field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -64,112 +64,154 @@ final class DateObject {
|
|||
static final String GETYEAR_OF_ERA_METHOD = "getYearOfEra";
|
||||
|
||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||
switch (variable) {
|
||||
case CENTURY_OF_ERA_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100);
|
||||
case DAY_OF_MONTH_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfMonth);
|
||||
case DAY_OF_WEEK_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.getDayOfWeek().getValue());
|
||||
case DAY_OF_YEAR_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfYear);
|
||||
case ERA_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.ERA));
|
||||
case HOUR_OF_DAY_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getHour);
|
||||
case MILLIS_OF_DAY_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MILLI_OF_DAY));
|
||||
case MILLIS_OF_SECOND_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MILLI_OF_SECOND));
|
||||
case MINUTE_OF_DAY_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.MINUTE_OF_DAY));
|
||||
case MINUTE_OF_HOUR_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMinute);
|
||||
case MONTH_OF_YEAR_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMonthValue);
|
||||
case SECOND_OF_DAY_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.SECOND_OF_DAY));
|
||||
case SECOND_OF_MINUTE_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getSecond);
|
||||
case WEEK_OF_WEEK_YEAR_VARIABLE:
|
||||
return new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||
);
|
||||
case WEEK_YEAR_VARIABLE:
|
||||
return new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||
);
|
||||
case YEAR_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getYear);
|
||||
case YEAR_OF_CENTURY_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100);
|
||||
case YEAR_OF_ERA_VARIABLE:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.YEAR_OF_ERA));
|
||||
default:
|
||||
throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for date object on field [" + fieldName + "]."
|
||||
);
|
||||
}
|
||||
return switch (variable) {
|
||||
case CENTURY_OF_ERA_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100
|
||||
);
|
||||
case DAY_OF_MONTH_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfMonth);
|
||||
case DAY_OF_WEEK_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.getDayOfWeek().getValue()
|
||||
);
|
||||
case DAY_OF_YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getDayOfYear);
|
||||
case ERA_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, zdt -> zdt.get(ChronoField.ERA));
|
||||
case HOUR_OF_DAY_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getHour);
|
||||
case MILLIS_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.MILLI_OF_DAY)
|
||||
);
|
||||
case MILLIS_OF_SECOND_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.MILLI_OF_SECOND)
|
||||
);
|
||||
case MINUTE_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.MINUTE_OF_DAY)
|
||||
);
|
||||
case MINUTE_OF_HOUR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMinute);
|
||||
case MONTH_OF_YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getMonthValue);
|
||||
case SECOND_OF_DAY_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.SECOND_OF_DAY)
|
||||
);
|
||||
case SECOND_OF_MINUTE_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getSecond);
|
||||
case WEEK_OF_WEEK_YEAR_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||
);
|
||||
case WEEK_YEAR_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||
);
|
||||
case YEAR_VARIABLE -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, variable, ZonedDateTime::getYear);
|
||||
case YEAR_OF_CENTURY_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100
|
||||
);
|
||||
case YEAR_OF_ERA_VARIABLE -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
variable,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA)
|
||||
);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for date object on field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||
switch (method) {
|
||||
case GETCENTURY_OF_ERA_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100);
|
||||
case GETDAY_OF_MONTH_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfMonth);
|
||||
case GETDAY_OF_WEEK_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.getDayOfWeek().getValue());
|
||||
case GETDAY_OF_YEAR_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfYear);
|
||||
case GETERA_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.ERA));
|
||||
case GETHOUR_OF_DAY_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getHour);
|
||||
case GETMILLIS_OF_DAY_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MILLI_OF_DAY));
|
||||
case GETMILLIS_OF_SECOND_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MILLI_OF_SECOND));
|
||||
case GETMINUTE_OF_DAY_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.MINUTE_OF_DAY));
|
||||
case GETMINUTE_OF_HOUR_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMinute);
|
||||
case GETMONTH_OF_YEAR_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMonthValue);
|
||||
case GETSECOND_OF_DAY_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.SECOND_OF_DAY));
|
||||
case GETSECOND_OF_MINUTE_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getSecond);
|
||||
case GETWEEK_OF_WEEK_YEAR_METHOD:
|
||||
return new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||
);
|
||||
case GETWEEK_YEAR_METHOD:
|
||||
return new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||
);
|
||||
case GETYEAR_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getYear);
|
||||
case GETYEAR_OF_CENTURY_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100);
|
||||
case GETYEAR_OF_ERA_METHOD:
|
||||
return new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.YEAR_OF_ERA));
|
||||
default:
|
||||
throw new IllegalArgumentException(
|
||||
"Member method [" + method + "] does not exist for date object on field [" + fieldName + "]."
|
||||
);
|
||||
}
|
||||
return switch (method) {
|
||||
case GETCENTURY_OF_ERA_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) / 100
|
||||
);
|
||||
case GETDAY_OF_MONTH_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfMonth);
|
||||
case GETDAY_OF_WEEK_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.getDayOfWeek().getValue()
|
||||
);
|
||||
case GETDAY_OF_YEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getDayOfYear);
|
||||
case GETERA_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, zdt -> zdt.get(ChronoField.ERA));
|
||||
case GETHOUR_OF_DAY_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getHour);
|
||||
case GETMILLIS_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.MILLI_OF_DAY)
|
||||
);
|
||||
case GETMILLIS_OF_SECOND_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.MILLI_OF_SECOND)
|
||||
);
|
||||
case GETMINUTE_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.MINUTE_OF_DAY)
|
||||
);
|
||||
case GETMINUTE_OF_HOUR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMinute);
|
||||
case GETMONTH_OF_YEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getMonthValue);
|
||||
case GETSECOND_OF_DAY_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.SECOND_OF_DAY)
|
||||
);
|
||||
case GETSECOND_OF_MINUTE_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getSecond);
|
||||
case GETWEEK_OF_WEEK_YEAR_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekOfWeekBasedYear())
|
||||
);
|
||||
case GETWEEK_YEAR_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(DateFormatters.WEEK_FIELDS_ROOT.weekBasedYear())
|
||||
);
|
||||
case GETYEAR_METHOD -> new DateObjectValueSource(fieldData, MultiValueMode.MIN, method, ZonedDateTime::getYear);
|
||||
case GETYEAR_OF_CENTURY_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA) % 100
|
||||
);
|
||||
case GETYEAR_OF_ERA_METHOD -> new DateObjectValueSource(
|
||||
fieldData,
|
||||
MultiValueMode.MIN,
|
||||
method,
|
||||
zdt -> zdt.get(ChronoField.YEAR_OF_ERA)
|
||||
);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member method [" + method + "] does not exist for date object on field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -29,28 +29,24 @@ final class GeoField {
|
|||
static final String GETLON_METHOD = "getLon";
|
||||
|
||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||
switch (variable) {
|
||||
case EMPTY_VARIABLE:
|
||||
return new GeoEmptyValueSource(fieldData);
|
||||
case LAT_VARIABLE:
|
||||
return new GeoLatitudeValueSource(fieldData);
|
||||
case LON_VARIABLE:
|
||||
return new GeoLongitudeValueSource(fieldData);
|
||||
default:
|
||||
throw new IllegalArgumentException("Member variable [" + variable + "] does not exist for geo field [" + fieldName + "].");
|
||||
}
|
||||
return switch (variable) {
|
||||
case EMPTY_VARIABLE -> new GeoEmptyValueSource(fieldData);
|
||||
case LAT_VARIABLE -> new GeoLatitudeValueSource(fieldData);
|
||||
case LON_VARIABLE -> new GeoLongitudeValueSource(fieldData);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for geo field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||
switch (method) {
|
||||
case ISEMPTY_METHOD:
|
||||
return new GeoEmptyValueSource(fieldData);
|
||||
case GETLAT_METHOD:
|
||||
return new GeoLatitudeValueSource(fieldData);
|
||||
case GETLON_METHOD:
|
||||
return new GeoLongitudeValueSource(fieldData);
|
||||
default:
|
||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for geo field [" + fieldName + "].");
|
||||
}
|
||||
return switch (method) {
|
||||
case ISEMPTY_METHOD -> new GeoEmptyValueSource(fieldData);
|
||||
case GETLAT_METHOD -> new GeoLatitudeValueSource(fieldData);
|
||||
case GETLON_METHOD -> new GeoLongitudeValueSource(fieldData);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member method [" + method + "] does not exist for geo field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -36,42 +36,30 @@ final class NumericField {
|
|||
static final String COUNT_METHOD = "count";
|
||||
|
||||
static DoubleValuesSource getVariable(IndexFieldData<?> fieldData, String fieldName, String variable) {
|
||||
switch (variable) {
|
||||
case VALUE_VARIABLE:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case EMPTY_VARIABLE:
|
||||
return new EmptyMemberValueSource(fieldData);
|
||||
case LENGTH_VARIABLE:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
default:
|
||||
throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."
|
||||
);
|
||||
}
|
||||
return switch (variable) {
|
||||
case VALUE_VARIABLE -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case EMPTY_VARIABLE -> new EmptyMemberValueSource(fieldData);
|
||||
case LENGTH_VARIABLE -> new CountMethodValueSource(fieldData);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member variable [" + variable + "] does not exist for " + "numeric field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
static DoubleValuesSource getMethod(IndexFieldData<?> fieldData, String fieldName, String method) {
|
||||
switch (method) {
|
||||
case GETVALUE_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case ISEMPTY_METHOD:
|
||||
return new EmptyMemberValueSource(fieldData);
|
||||
case SIZE_METHOD:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
case MINIMUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case MAXIMUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||
case AVERAGE_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||
case MEDIAN_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||
case SUM_METHOD:
|
||||
return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||
case COUNT_METHOD:
|
||||
return new CountMethodValueSource(fieldData);
|
||||
default:
|
||||
throw new IllegalArgumentException("Member method [" + method + "] does not exist for numeric field [" + fieldName + "].");
|
||||
}
|
||||
return switch (method) {
|
||||
case GETVALUE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case ISEMPTY_METHOD -> new EmptyMemberValueSource(fieldData);
|
||||
case SIZE_METHOD -> new CountMethodValueSource(fieldData);
|
||||
case MINIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MIN);
|
||||
case MAXIMUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MAX);
|
||||
case AVERAGE_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.AVG);
|
||||
case MEDIAN_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
|
||||
case SUM_METHOD -> new FieldDataValueSource(fieldData, MultiValueMode.SUM);
|
||||
case COUNT_METHOD -> new CountMethodValueSource(fieldData);
|
||||
default -> throw new IllegalArgumentException(
|
||||
"Member method [" + method + "] does not exist for numeric field [" + fieldName + "]."
|
||||
);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -156,34 +156,34 @@ public final class DefBootstrap {
|
|||
* Does a slow lookup against the whitelist.
|
||||
*/
|
||||
private MethodHandle lookup(int flavorValue, String nameValue, Class<?> receiver) throws Throwable {
|
||||
switch (flavorValue) {
|
||||
case METHOD_CALL:
|
||||
return Def.lookupMethod(painlessLookup, functions, constants, methodHandlesLookup, type(), receiver, nameValue, args);
|
||||
case LOAD:
|
||||
return Def.lookupGetter(painlessLookup, receiver, nameValue);
|
||||
case STORE:
|
||||
return Def.lookupSetter(painlessLookup, receiver, nameValue);
|
||||
case ARRAY_LOAD:
|
||||
return Def.lookupArrayLoad(receiver);
|
||||
case ARRAY_STORE:
|
||||
return Def.lookupArrayStore(receiver);
|
||||
case ITERATOR:
|
||||
return Def.lookupIterator(receiver);
|
||||
case REFERENCE:
|
||||
return Def.lookupReference(
|
||||
painlessLookup,
|
||||
functions,
|
||||
constants,
|
||||
methodHandlesLookup,
|
||||
(String) args[0],
|
||||
receiver,
|
||||
nameValue
|
||||
);
|
||||
case INDEX_NORMALIZE:
|
||||
return Def.lookupIndexNormalize(receiver);
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
return switch (flavorValue) {
|
||||
case METHOD_CALL -> Def.lookupMethod(
|
||||
painlessLookup,
|
||||
functions,
|
||||
constants,
|
||||
methodHandlesLookup,
|
||||
type(),
|
||||
receiver,
|
||||
nameValue,
|
||||
args
|
||||
);
|
||||
case LOAD -> Def.lookupGetter(painlessLookup, receiver, nameValue);
|
||||
case STORE -> Def.lookupSetter(painlessLookup, receiver, nameValue);
|
||||
case ARRAY_LOAD -> Def.lookupArrayLoad(receiver);
|
||||
case ARRAY_STORE -> Def.lookupArrayStore(receiver);
|
||||
case ITERATOR -> Def.lookupIterator(receiver);
|
||||
case REFERENCE -> Def.lookupReference(
|
||||
painlessLookup,
|
||||
functions,
|
||||
constants,
|
||||
methodHandlesLookup,
|
||||
(String) args[0],
|
||||
receiver,
|
||||
nameValue
|
||||
);
|
||||
case INDEX_NORMALIZE -> Def.lookupIndexNormalize(receiver);
|
||||
default -> throw new AssertionError();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -493,7 +493,7 @@ public final class DefBootstrap {
|
|||
// validate arguments
|
||||
switch (flavor) {
|
||||
// "function-call" like things get a polymorphic cache
|
||||
case METHOD_CALL:
|
||||
case METHOD_CALL -> {
|
||||
if (args.length == 0) {
|
||||
throw new BootstrapMethodError("Invalid number of parameters for method call");
|
||||
}
|
||||
|
@ -509,17 +509,14 @@ public final class DefBootstrap {
|
|||
throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references");
|
||||
}
|
||||
return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
|
||||
case LOAD:
|
||||
case STORE:
|
||||
case ARRAY_LOAD:
|
||||
case ARRAY_STORE:
|
||||
case ITERATOR:
|
||||
case INDEX_NORMALIZE:
|
||||
}
|
||||
case LOAD, STORE, ARRAY_LOAD, ARRAY_STORE, ITERATOR, INDEX_NORMALIZE -> {
|
||||
if (args.length > 0) {
|
||||
throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor);
|
||||
}
|
||||
return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
|
||||
case REFERENCE:
|
||||
}
|
||||
case REFERENCE -> {
|
||||
if (args.length != 1) {
|
||||
throw new BootstrapMethodError("Invalid number of parameters for reference call");
|
||||
}
|
||||
|
@ -527,11 +524,10 @@ public final class DefBootstrap {
|
|||
throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]);
|
||||
}
|
||||
return new PIC(painlessLookup, functions, constants, methodHandlesLookup, name, type, initialDepth, flavor, args);
|
||||
}
|
||||
|
||||
// operators get monomorphic cache, with a generic impl for a fallback
|
||||
case UNARY_OPERATOR:
|
||||
case SHIFT_OPERATOR:
|
||||
case BINARY_OPERATOR:
|
||||
case UNARY_OPERATOR, SHIFT_OPERATOR, BINARY_OPERATOR -> {
|
||||
if (args.length != 1) {
|
||||
throw new BootstrapMethodError("Invalid number of parameters for operator call");
|
||||
}
|
||||
|
@ -548,8 +544,8 @@ public final class DefBootstrap {
|
|||
throw new BootstrapMethodError("This parameter is only supported for BINARY/SHIFT_OPERATORs");
|
||||
}
|
||||
return new MIC(name, type, initialDepth, flavor, flags);
|
||||
default:
|
||||
throw new BootstrapMethodError("Illegal static bootstrap parameter for flavor: " + flavor);
|
||||
}
|
||||
default -> throw new BootstrapMethodError("Illegal static bootstrap parameter for flavor: " + flavor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -331,16 +331,10 @@ public final class MethodWriter extends GeneratorAdapter {
|
|||
Type methodType = Type.getMethodType(getType(returnType), getType(lhs), getType(rhs));
|
||||
|
||||
switch (operation) {
|
||||
case MUL:
|
||||
invokeDefCall("mul", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case DIV:
|
||||
invokeDefCall("div", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case REM:
|
||||
invokeDefCall("rem", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case ADD:
|
||||
case MUL -> invokeDefCall("mul", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case DIV -> invokeDefCall("div", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case REM -> invokeDefCall("rem", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case ADD -> {
|
||||
// if either side is primitive, then the + operator should always throw NPE on null,
|
||||
// so we don't need a special NPE guard.
|
||||
// otherwise, we need to allow nulls for possible string concatenation.
|
||||
|
@ -349,30 +343,15 @@ public final class MethodWriter extends GeneratorAdapter {
|
|||
flags |= DefBootstrap.OPERATOR_ALLOWS_NULL;
|
||||
}
|
||||
invokeDefCall("add", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case SUB:
|
||||
invokeDefCall("sub", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case LSH:
|
||||
invokeDefCall("lsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
break;
|
||||
case USH:
|
||||
invokeDefCall("ush", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
break;
|
||||
case RSH:
|
||||
invokeDefCall("rsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
break;
|
||||
case BWAND:
|
||||
invokeDefCall("and", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case XOR:
|
||||
invokeDefCall("xor", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
case BWOR:
|
||||
invokeDefCall("or", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
break;
|
||||
default:
|
||||
throw location.createError(new IllegalStateException("Illegal tree structure."));
|
||||
}
|
||||
case SUB -> invokeDefCall("sub", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case LSH -> invokeDefCall("lsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
case USH -> invokeDefCall("ush", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
case RSH -> invokeDefCall("rsh", methodType, DefBootstrap.SHIFT_OPERATOR, flags);
|
||||
case BWAND -> invokeDefCall("and", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case XOR -> invokeDefCall("xor", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
case BWOR -> invokeDefCall("or", methodType, DefBootstrap.BINARY_OPERATOR, flags);
|
||||
default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -389,41 +368,18 @@ public final class MethodWriter extends GeneratorAdapter {
|
|||
}
|
||||
|
||||
switch (operation) {
|
||||
case MUL:
|
||||
math(GeneratorAdapter.MUL, getType(clazz));
|
||||
break;
|
||||
case DIV:
|
||||
math(GeneratorAdapter.DIV, getType(clazz));
|
||||
break;
|
||||
case REM:
|
||||
math(GeneratorAdapter.REM, getType(clazz));
|
||||
break;
|
||||
case ADD:
|
||||
math(GeneratorAdapter.ADD, getType(clazz));
|
||||
break;
|
||||
case SUB:
|
||||
math(GeneratorAdapter.SUB, getType(clazz));
|
||||
break;
|
||||
case LSH:
|
||||
math(GeneratorAdapter.SHL, getType(clazz));
|
||||
break;
|
||||
case USH:
|
||||
math(GeneratorAdapter.USHR, getType(clazz));
|
||||
break;
|
||||
case RSH:
|
||||
math(GeneratorAdapter.SHR, getType(clazz));
|
||||
break;
|
||||
case BWAND:
|
||||
math(GeneratorAdapter.AND, getType(clazz));
|
||||
break;
|
||||
case XOR:
|
||||
math(GeneratorAdapter.XOR, getType(clazz));
|
||||
break;
|
||||
case BWOR:
|
||||
math(GeneratorAdapter.OR, getType(clazz));
|
||||
break;
|
||||
default:
|
||||
throw location.createError(new IllegalStateException("Illegal tree structure."));
|
||||
case MUL -> math(GeneratorAdapter.MUL, getType(clazz));
|
||||
case DIV -> math(GeneratorAdapter.DIV, getType(clazz));
|
||||
case REM -> math(GeneratorAdapter.REM, getType(clazz));
|
||||
case ADD -> math(GeneratorAdapter.ADD, getType(clazz));
|
||||
case SUB -> math(GeneratorAdapter.SUB, getType(clazz));
|
||||
case LSH -> math(GeneratorAdapter.SHL, getType(clazz));
|
||||
case USH -> math(GeneratorAdapter.USHR, getType(clazz));
|
||||
case RSH -> math(GeneratorAdapter.SHR, getType(clazz));
|
||||
case BWAND -> math(GeneratorAdapter.AND, getType(clazz));
|
||||
case XOR -> math(GeneratorAdapter.XOR, getType(clazz));
|
||||
case BWOR -> math(GeneratorAdapter.OR, getType(clazz));
|
||||
default -> throw location.createError(new IllegalStateException("Illegal tree structure."));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -27,20 +27,14 @@ public class Json {
json
);

switch (parser.nextToken()) {
case START_ARRAY:
return parser.list();
case START_OBJECT:
return parser.map();
case VALUE_NUMBER:
return parser.numberValue();
case VALUE_BOOLEAN:
return parser.booleanValue();
case VALUE_STRING:
return parser.text();
default:
return null;
}
return switch (parser.nextToken()) {
case START_ARRAY -> parser.list();
case START_OBJECT -> parser.map();
case VALUE_NUMBER -> parser.numberValue();
case VALUE_BOOLEAN -> parser.booleanValue();
case VALUE_STRING -> parser.text();
default -> null;
};
}

/**

@ -2282,34 +2282,17 @@ public class DefaultSemanticAnalysisPhase extends UserTreeBaseVisitor<SemanticSc
for (int i = 0; i < flags.length(); ++i) {
char flag = flags.charAt(i);

switch (flag) {
case 'c':
regexFlags |= Pattern.CANON_EQ;
break;
case 'i':
regexFlags |= Pattern.CASE_INSENSITIVE;
break;
case 'l':
regexFlags |= Pattern.LITERAL;
break;
case 'm':
regexFlags |= Pattern.MULTILINE;
break;
case 's':
regexFlags |= Pattern.DOTALL;
break;
case 'U':
regexFlags |= Pattern.UNICODE_CHARACTER_CLASS;
break;
case 'u':
regexFlags |= Pattern.UNICODE_CASE;
break;
case 'x':
regexFlags |= Pattern.COMMENTS;
break;
default:
throw new IllegalArgumentException("invalid regular expression: unknown flag [" + flag + "]");
}
regexFlags |= switch (flag) {
case 'c' -> Pattern.CANON_EQ;
case 'i' -> Pattern.CASE_INSENSITIVE;
case 'l' -> Pattern.LITERAL;
case 'm' -> Pattern.MULTILINE;
case 's' -> Pattern.DOTALL;
case 'U' -> Pattern.UNICODE_CHARACTER_CLASS;
case 'u' -> Pattern.UNICODE_CASE;
case 'x' -> Pattern.COMMENTS;
default -> throw new IllegalArgumentException("invalid regular expression: unknown flag [" + flag + "]");
};
}

Pattern compiled;
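
The regex-flag hunk shows a slightly different use: the switch expression produces one bit per character and the caller folds it into a mask with |=. A cut-down sketch of that idea, covering only a few of the flags, could look like:

import java.util.regex.Pattern;

// Sketch of folding per-character options into a bit mask with a switch expression,
// as in the regex-flag parsing above; only a subset of the flags is shown here.
public class FlagAccumulatorSketch {
    static int parseFlags(String flags) {
        int regexFlags = 0;
        for (int i = 0; i < flags.length(); i++) {
            char flag = flags.charAt(i);
            // each character contributes exactly one bit, unknown characters fail fast
            regexFlags |= switch (flag) {
                case 'i' -> Pattern.CASE_INSENSITIVE;
                case 'm' -> Pattern.MULTILINE;
                case 's' -> Pattern.DOTALL;
                default -> throw new IllegalArgumentException("unknown flag [" + flag + "]");
            };
        }
        return regexFlags;
    }

    public static void main(String[] args) {
        Pattern p = Pattern.compile("^foo$", parseFlags("im"));
        System.out.println(p.matcher("FOO").find()); // true
    }
}
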
@ -31,22 +31,15 @@ public class ListTests extends ArrayLikeObjectTestCase {
}

private String fillValue(String valueType) {
switch (valueType) {
case "int":
return "0";
case "long":
return "0L";
case "short":
return "(short) 0";
case "byte":
return "(byte) 0";
case "float":
return "0.0f";
case "double":
return "0.0"; // Double is implicit for decimal constants
default:
return null;
}
return switch (valueType) {
case "int" -> "0";
case "long" -> "0L";
case "short" -> "(short) 0";
case "byte" -> "(byte) 0";
case "float" -> "0.0f";
case "double" -> "0.0"; // Double is implicit for decimal constants
default -> null;
};
}

@Override

@ -22,20 +22,12 @@ public class PainlessExecuteResponseTests extends AbstractSerializingTestCase<Pa
|
|||
|
||||
@Override
|
||||
protected PainlessExecuteAction.Response createTestInstance() {
|
||||
Object result;
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
result = randomAlphaOfLength(10);
|
||||
break;
|
||||
case 1:
|
||||
result = randomBoolean();
|
||||
break;
|
||||
case 2:
|
||||
result = randomDoubleBetween(-10, 10, true);
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("invalid branch");
|
||||
}
|
||||
Object result = switch (randomIntBetween(0, 2)) {
|
||||
case 0 -> randomAlphaOfLength(10);
|
||||
case 1 -> randomBoolean();
|
||||
case 2 -> randomDoubleBetween(-10, 10, true);
|
||||
default -> throw new IllegalStateException("invalid branch");
|
||||
};
|
||||
return new PainlessExecuteAction.Response(result);
|
||||
}
|
||||
|
||||
|
@ -44,20 +36,12 @@ public class PainlessExecuteResponseTests extends AbstractSerializingTestCase<Pa
|
|||
parser.nextToken(); // START-OBJECT
|
||||
parser.nextToken(); // FIELD-NAME
|
||||
XContentParser.Token token = parser.nextToken(); // result value
|
||||
Object result;
|
||||
switch (token) {
|
||||
case VALUE_STRING:
|
||||
result = parser.text();
|
||||
break;
|
||||
case VALUE_BOOLEAN:
|
||||
result = parser.booleanValue();
|
||||
break;
|
||||
case VALUE_NUMBER:
|
||||
result = parser.doubleValue();
|
||||
break;
|
||||
default:
|
||||
throw new IOException("invalid response");
|
||||
}
|
||||
Object result = switch (token) {
|
||||
case VALUE_STRING -> parser.text();
|
||||
case VALUE_BOOLEAN -> parser.booleanValue();
|
||||
case VALUE_NUMBER -> parser.doubleValue();
|
||||
default -> throw new IOException("invalid response");
|
||||
};
|
||||
return new PainlessExecuteAction.Response(result);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -112,15 +112,11 @@ public class LegacyGeoShapeFieldMapper extends AbstractShapeGeometryFieldMapper<
public static final double DISTANCE_ERROR_PCT = 0.025d;

public static int defaultTreeLevel(String tree) {
switch (tree) {
case PrefixTrees.GEOHASH:
return GEOHASH_TREE_LEVELS;
case PrefixTrees.LEGACY_QUADTREE:
case PrefixTrees.QUADTREE:
return QUADTREE_LEVELS;
default:
throw new IllegalArgumentException("Unknown prefix type [" + tree + "]");
}
return switch (tree) {
case PrefixTrees.GEOHASH -> GEOHASH_TREE_LEVELS;
case PrefixTrees.LEGACY_QUADTREE, PrefixTrees.QUADTREE -> QUADTREE_LEVELS;
default -> throw new IllegalArgumentException("Unknown prefix type [" + tree + "]");
};
}
}

@ -102,26 +102,17 @@ public class GeoWKTParser {
|
|||
throw new ElasticsearchParseException("Expected geometry type [{}] but found [{}]", shapeType, type);
|
||||
}
|
||||
}
|
||||
switch (type) {
|
||||
case POINT:
|
||||
return parsePoint(stream, ignoreZValue, coerce);
|
||||
case MULTIPOINT:
|
||||
return parseMultiPoint(stream, ignoreZValue, coerce);
|
||||
case LINESTRING:
|
||||
return parseLine(stream, ignoreZValue, coerce);
|
||||
case MULTILINESTRING:
|
||||
return parseMultiLine(stream, ignoreZValue, coerce);
|
||||
case POLYGON:
|
||||
return parsePolygon(stream, ignoreZValue, coerce);
|
||||
case MULTIPOLYGON:
|
||||
return parseMultiPolygon(stream, ignoreZValue, coerce);
|
||||
case ENVELOPE:
|
||||
return parseBBox(stream);
|
||||
case GEOMETRYCOLLECTION:
|
||||
return parseGeometryCollection(stream, ignoreZValue, coerce);
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown geometry type: " + type);
|
||||
}
|
||||
return switch (type) {
|
||||
case POINT -> parsePoint(stream, ignoreZValue, coerce);
|
||||
case MULTIPOINT -> parseMultiPoint(stream, ignoreZValue, coerce);
|
||||
case LINESTRING -> parseLine(stream, ignoreZValue, coerce);
|
||||
case MULTILINESTRING -> parseMultiLine(stream, ignoreZValue, coerce);
|
||||
case POLYGON -> parsePolygon(stream, ignoreZValue, coerce);
|
||||
case MULTIPOLYGON -> parseMultiPolygon(stream, ignoreZValue, coerce);
|
||||
case ENVELOPE -> parseBBox(stream);
|
||||
case GEOMETRYCOLLECTION -> parseGeometryCollection(stream, ignoreZValue, coerce);
|
||||
default -> throw new IllegalArgumentException("Unknown geometry type: " + type);
|
||||
};
|
||||
}
|
||||
|
||||
private static EnvelopeBuilder parseBBox(StreamTokenizer stream) throws IOException, ElasticsearchParseException {
|
||||
|
@ -317,17 +308,13 @@ public class GeoWKTParser {
|
|||
}
|
||||
|
||||
private static String tokenString(StreamTokenizer stream) {
|
||||
switch (stream.ttype) {
|
||||
case StreamTokenizer.TT_WORD:
|
||||
return stream.sval;
|
||||
case StreamTokenizer.TT_EOF:
|
||||
return EOF;
|
||||
case StreamTokenizer.TT_EOL:
|
||||
return EOL;
|
||||
case StreamTokenizer.TT_NUMBER:
|
||||
return NUMBER;
|
||||
}
|
||||
return "'" + (char) stream.ttype + "'";
|
||||
return switch (stream.ttype) {
|
||||
case StreamTokenizer.TT_WORD -> stream.sval;
|
||||
case StreamTokenizer.TT_EOF -> EOF;
|
||||
case StreamTokenizer.TT_EOL -> EOL;
|
||||
case StreamTokenizer.TT_NUMBER -> NUMBER;
|
||||
default -> "'" + (char) stream.ttype + "'";
|
||||
};
|
||||
}
|
||||
|
||||
private static boolean isNumberNext(StreamTokenizer stream) throws IOException {
|
||||
|
|
|
@ -97,18 +97,12 @@ public class LegacyGeoShapeQueryProcessor {
}

public static SpatialArgs getArgs(Geometry shape, ShapeRelation relation) {
switch (relation) {
case DISJOINT:
return new SpatialArgs(SpatialOperation.IsDisjointTo, buildS4J(shape));
case INTERSECTS:
return new SpatialArgs(SpatialOperation.Intersects, buildS4J(shape));
case WITHIN:
return new SpatialArgs(SpatialOperation.IsWithin, buildS4J(shape));
case CONTAINS:
return new SpatialArgs(SpatialOperation.Contains, buildS4J(shape));
default:
throw new IllegalArgumentException("invalid relation [" + relation + "]");
}
return switch (relation) {
case DISJOINT -> new SpatialArgs(SpatialOperation.IsDisjointTo, buildS4J(shape));
case INTERSECTS -> new SpatialArgs(SpatialOperation.Intersects, buildS4J(shape));
case WITHIN -> new SpatialArgs(SpatialOperation.IsWithin, buildS4J(shape));
case CONTAINS -> new SpatialArgs(SpatialOperation.Contains, buildS4J(shape));
};
}

/**
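
Note that the new getArgs body has no default branch: a switch expression over the ShapeRelation enum must be exhaustive, so the compiler now rejects an unhandled constant instead of deferring to the old runtime IllegalArgumentException. A small standalone sketch of that exhaustiveness check, with a hypothetical Relation enum, might be:

// Sketch of an exhaustive switch expression over an enum. With no default branch the
// compiler verifies every constant is handled; adding a fifth constant to Relation
// turns any unhandled switch into a compile error. Relation and the strings are made up.
public class ExhaustiveEnumSketch {
    enum Relation { DISJOINT, INTERSECTS, WITHIN, CONTAINS }

    static String describe(Relation relation) {
        return switch (relation) { // no default: all four constants must appear
            case DISJOINT -> "shapes do not overlap";
            case INTERSECTS -> "shapes share at least one point";
            case WITHIN -> "query shape lies inside the field shape";
            case CONTAINS -> "field shape lies inside the query shape";
        };
    }

    public static void main(String[] args) {
        System.out.println(describe(Relation.WITHIN));
    }
}
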
@ -35,35 +35,27 @@ public class EnvelopeBuilderTests extends AbstractShapeBuilderTestCase<EnvelopeB
|
|||
}
|
||||
|
||||
static EnvelopeBuilder mutate(EnvelopeBuilder original) throws IOException {
|
||||
EnvelopeBuilder mutation = copyShape(original);
|
||||
copyShape(original);
|
||||
// move one corner to the middle of original
|
||||
switch (randomIntBetween(0, 3)) {
|
||||
case 0:
|
||||
mutation = new EnvelopeBuilder(
|
||||
new Coordinate(randomDoubleBetween(-180.0, original.bottomRight().x, true), original.topLeft().y),
|
||||
original.bottomRight()
|
||||
);
|
||||
break;
|
||||
case 1:
|
||||
mutation = new EnvelopeBuilder(
|
||||
new Coordinate(original.topLeft().x, randomDoubleBetween(original.bottomRight().y, 90.0, true)),
|
||||
original.bottomRight()
|
||||
);
|
||||
break;
|
||||
case 2:
|
||||
mutation = new EnvelopeBuilder(
|
||||
original.topLeft(),
|
||||
new Coordinate(randomDoubleBetween(original.topLeft().x, 180.0, true), original.bottomRight().y)
|
||||
);
|
||||
break;
|
||||
case 3:
|
||||
mutation = new EnvelopeBuilder(
|
||||
original.topLeft(),
|
||||
new Coordinate(original.bottomRight().x, randomDoubleBetween(-90.0, original.topLeft().y, true))
|
||||
);
|
||||
break;
|
||||
}
|
||||
return mutation;
|
||||
return switch (randomIntBetween(0, 3)) {
|
||||
case 0 -> new EnvelopeBuilder(
|
||||
new Coordinate(randomDoubleBetween(-180.0, original.bottomRight().x, true), original.topLeft().y),
|
||||
original.bottomRight()
|
||||
);
|
||||
case 1 -> new EnvelopeBuilder(
|
||||
new Coordinate(original.topLeft().x, randomDoubleBetween(original.bottomRight().y, 90.0, true)),
|
||||
original.bottomRight()
|
||||
);
|
||||
case 2 -> new EnvelopeBuilder(
|
||||
original.topLeft(),
|
||||
new Coordinate(randomDoubleBetween(original.topLeft().x, 180.0, true), original.bottomRight().y)
|
||||
);
|
||||
case 3 -> new EnvelopeBuilder(
|
||||
original.topLeft(),
|
||||
new Coordinate(original.bottomRight().x, randomDoubleBetween(-90.0, original.topLeft().y, true))
|
||||
);
|
||||
default -> copyShape(original);
|
||||
};
|
||||
}
|
||||
|
||||
static EnvelopeBuilder createRandomShape() {
|
||||
|
|
|
@ -20,30 +20,14 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
|
|||
int shapes = randomIntBetween(0, 8);
|
||||
for (int i = 0; i < shapes; i++) {
|
||||
switch (randomIntBetween(0, 7)) {
|
||||
case 0:
|
||||
geometryCollection.shape(PointBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 1:
|
||||
geometryCollection.shape(CircleBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 2:
|
||||
geometryCollection.shape(EnvelopeBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 3:
|
||||
geometryCollection.shape(LineStringBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 4:
|
||||
geometryCollection.shape(MultiLineStringBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 5:
|
||||
geometryCollection.shape(MultiPolygonBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 6:
|
||||
geometryCollection.shape(MultiPointBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 7:
|
||||
geometryCollection.shape(PolygonBuilderTests.createRandomShape());
|
||||
break;
|
||||
case 0 -> geometryCollection.shape(PointBuilderTests.createRandomShape());
|
||||
case 1 -> geometryCollection.shape(CircleBuilderTests.createRandomShape());
|
||||
case 2 -> geometryCollection.shape(EnvelopeBuilderTests.createRandomShape());
|
||||
case 3 -> geometryCollection.shape(LineStringBuilderTests.createRandomShape());
|
||||
case 4 -> geometryCollection.shape(MultiLineStringBuilderTests.createRandomShape());
|
||||
case 5 -> geometryCollection.shape(MultiPolygonBuilderTests.createRandomShape());
|
||||
case 6 -> geometryCollection.shape(MultiPointBuilderTests.createRandomShape());
|
||||
case 7 -> geometryCollection.shape(PolygonBuilderTests.createRandomShape());
|
||||
}
|
||||
}
|
||||
return geometryCollection;
|
||||
|
@ -59,35 +43,19 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
|
|||
if (mutation.shapes.size() > 0) {
|
||||
int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1);
|
||||
ShapeBuilder<?, ?, ?> shapeToChange = mutation.shapes.get(shapePosition);
|
||||
switch (shapeToChange.type()) {
|
||||
case POINT:
|
||||
shapeToChange = PointBuilderTests.mutate((PointBuilder) shapeToChange);
|
||||
break;
|
||||
case CIRCLE:
|
||||
shapeToChange = CircleBuilderTests.mutate((CircleBuilder) shapeToChange);
|
||||
break;
|
||||
case ENVELOPE:
|
||||
shapeToChange = EnvelopeBuilderTests.mutate((EnvelopeBuilder) shapeToChange);
|
||||
break;
|
||||
case LINESTRING:
|
||||
shapeToChange = LineStringBuilderTests.mutate((LineStringBuilder) shapeToChange);
|
||||
break;
|
||||
case MULTILINESTRING:
|
||||
shapeToChange = MultiLineStringBuilderTests.mutate((MultiLineStringBuilder) shapeToChange);
|
||||
break;
|
||||
case MULTIPOLYGON:
|
||||
shapeToChange = MultiPolygonBuilderTests.mutate((MultiPolygonBuilder) shapeToChange);
|
||||
break;
|
||||
case MULTIPOINT:
|
||||
shapeToChange = MultiPointBuilderTests.mutate((MultiPointBuilder) shapeToChange);
|
||||
break;
|
||||
case POLYGON:
|
||||
shapeToChange = PolygonBuilderTests.mutate((PolygonBuilder) shapeToChange);
|
||||
break;
|
||||
case GEOMETRYCOLLECTION:
|
||||
throw new UnsupportedOperationException("GeometryCollection should not be nested inside each other");
|
||||
}
|
||||
mutation.shapes.set(shapePosition, shapeToChange);
|
||||
mutation.shapes.set(shapePosition, switch (shapeToChange.type()) {
|
||||
case POINT -> PointBuilderTests.mutate((PointBuilder) shapeToChange);
|
||||
case CIRCLE -> CircleBuilderTests.mutate((CircleBuilder) shapeToChange);
|
||||
case ENVELOPE -> EnvelopeBuilderTests.mutate((EnvelopeBuilder) shapeToChange);
|
||||
case LINESTRING -> LineStringBuilderTests.mutate((LineStringBuilder) shapeToChange);
|
||||
case MULTILINESTRING -> MultiLineStringBuilderTests.mutate((MultiLineStringBuilder) shapeToChange);
|
||||
case MULTIPOLYGON -> MultiPolygonBuilderTests.mutate((MultiPolygonBuilder) shapeToChange);
|
||||
case MULTIPOINT -> MultiPointBuilderTests.mutate((MultiPointBuilder) shapeToChange);
|
||||
case POLYGON -> PolygonBuilderTests.mutate((PolygonBuilder) shapeToChange);
|
||||
case GEOMETRYCOLLECTION -> throw new UnsupportedOperationException(
|
||||
"GeometryCollection should not be nested inside each other"
|
||||
);
|
||||
});
|
||||
} else {
|
||||
mutation.shape(RandomShapeGenerator.createShape(random()));
|
||||
}
|
||||
|
|
|
@ -290,18 +290,13 @@ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder<RankFeat

private static ScoreFunction readScoreFunction(StreamInput in) throws IOException {
byte b = in.readByte();
switch (b) {
case 0:
return new ScoreFunction.Log(in);
case 1:
return new ScoreFunction.Saturation(in);
case 2:
return new ScoreFunction.Sigmoid(in);
case 3:
return new ScoreFunction.Linear(in);
default:
throw new IOException("Illegal score function id: " + b);
}
return switch (b) {
case 0 -> new ScoreFunction.Log(in);
case 1 -> new ScoreFunction.Saturation(in);
case 2 -> new ScoreFunction.Sigmoid(in);
case 3 -> new ScoreFunction.Linear(in);
default -> throw new IOException("Illegal score function id: " + b);
};
}

public static final ConstructingObjectParser<RankFeatureQueryBuilder, Void> PARSER = new ConstructingObjectParser<>("feature", args -> {

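The hunk above returns the result of a switch expression directly. As a rough standalone sketch (hypothetical names, not code from this commit), a branch that needs more than one statement uses a block with yield in place of break:

public class SwitchExpressionSketch {
    // Maps a hypothetical score-function id to a name; unknown ids throw,
    // mirroring the default-branch pattern used above.
    static String describe(byte id) {
        return switch (id) {
            case 0 -> "log";
            case 1 -> "saturation";
            case 2 -> {
                String prefix = "sig";   // multi-statement branch
                yield prefix + "moid";   // 'yield' supplies the value of this arm
            }
            default -> throw new IllegalArgumentException("Illegal score function id: " + id);
        };
    }

    public static void main(String[] args) {
        System.out.println(describe((byte) 2)); // prints "sigmoid"
    }
}
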
@ -341,17 +341,12 @@ public class ScaledFloatFieldMapperTests extends MapperTestCase {
* range of valid values.
*/
double v = randomDoubleBetween(-Float.MAX_VALUE, Float.MAX_VALUE, true);
switch (between(0, 3)) {
case 0:
return v;
case 1:
return (float) v;
case 2:
return Double.toString(v);
case 3:
return Float.toString((float) v);
default:
throw new IllegalArgumentException();
}
return switch (between(0, 3)) {
case 0 -> v;
case 1 -> (float) v;
case 2 -> Double.toString(v);
case 3 -> Float.toString((float) v);
default -> throw new IllegalArgumentException();
};
}
}

@ -921,31 +921,29 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
final int randomFrom = randomInt();
final byte[] encodedFrom;
switch (encodingType) {
case 0:
case 0 -> {
encodedFrom = new byte[Integer.BYTES];
IntPoint.encodeDimension(randomFrom, encodedFrom, 0);
break;
case 1:
}
case 1 -> {
encodedFrom = new byte[Long.BYTES];
LongPoint.encodeDimension(randomFrom, encodedFrom, 0);
break;
default:
throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}
default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}

final int randomTo = randomIntBetween(randomFrom, Integer.MAX_VALUE);
final byte[] encodedTo;
switch (encodingType) {
case 0:
case 0 -> {
encodedTo = new byte[Integer.BYTES];
IntPoint.encodeDimension(randomTo, encodedTo, 0);
break;
case 1:
}
case 1 -> {
encodedTo = new byte[Long.BYTES];
LongPoint.encodeDimension(randomTo, encodedTo, 0);
break;
default:
throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}
default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}

String randomFieldName = randomAlphaOfLength(5);

@ -957,20 +955,19 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
MurmurHash3.hash128(fieldAsBytesRef.bytes, fieldAsBytesRef.offset, fieldAsBytesRef.length, 0, hash);

switch (encodingType) {
case 0:
case 0 -> {
assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 0, 8)).getLong());
assertEquals(randomFrom, IntPoint.decodeDimension(subByteArray(result, 12, 4), 0));
assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 16, 8)).getLong());
assertEquals(randomTo, IntPoint.decodeDimension(subByteArray(result, 28, 4), 0));
break;
case 1:
}
case 1 -> {
assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 0, 8)).getLong());
assertEquals(randomFrom, LongPoint.decodeDimension(subByteArray(result, 8, 8), 0));
assertEquals(hash.h1, ByteBuffer.wrap(subByteArray(result, 16, 8)).getLong());
assertEquals(randomTo, LongPoint.decodeDimension(subByteArray(result, 24, 8), 0));
break;
default:
throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}
default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
}
}
}

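In the two hunks above the arrow-form case blocks are used as a switch statement rather than an expression: each branch assigns the blank final array, and no break is needed because arrow labels never fall through. A minimal standalone sketch of that shape (hypothetical names and input, not code from this commit):

public class ArrowStatementSketch {
    public static void main(String[] args) {
        int encodingType = 1;          // hypothetical input
        final byte[] encoded;          // blank final, assigned exactly once below
        switch (encodingType) {
            case 0 -> encoded = new byte[Integer.BYTES];
            case 1 -> encoded = new byte[Long.BYTES];
            default -> throw new AssertionError("unexpected encoding type [" + encodingType + "]");
        }
        System.out.println(encoded.length); // prints 8
    }
}
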
@ -319,23 +319,19 @@ public class DiscountedCumulativeGainTests extends ESTestCase {
}

private static DiscountedCumulativeGain mutateTestItem(DiscountedCumulativeGain original) {
switch (randomIntBetween(0, 2)) {
case 0:
return new DiscountedCumulativeGain(original.getNormalize() == false, original.getUnknownDocRating(), original.getK());
case 1:
return new DiscountedCumulativeGain(
original.getNormalize(),
randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
original.getK()
);
case 2:
return new DiscountedCumulativeGain(
original.getNormalize(),
original.getUnknownDocRating(),
randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
);
default:
throw new IllegalArgumentException("mutation variant not allowed");
}
return switch (randomIntBetween(0, 2)) {
case 0 -> new DiscountedCumulativeGain(original.getNormalize() == false, original.getUnknownDocRating(), original.getK());
case 1 -> new DiscountedCumulativeGain(
original.getNormalize(),
randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
original.getK()
);
case 2 -> new DiscountedCumulativeGain(
original.getNormalize(),
original.getUnknownDocRating(),
randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
);
default -> throw new IllegalArgumentException("mutation variant not allowed");
};
}
}

@ -52,23 +52,16 @@ public class EvalQueryQualityTests extends ESTestCase {
if (randomBoolean()) {
int metricDetail = randomIntBetween(0, 2);
switch (metricDetail) {
case 0:
evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000)));
break;
case 1:
evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000)));
break;
case 2:
evalQueryQuality.setMetricDetails(
new DiscountedCumulativeGain.Detail(
randomDoubleBetween(0, 1, true),
randomBoolean() ? randomDoubleBetween(0, 1, true) : 0,
randomInt()
)
);
break;
default:
throw new IllegalArgumentException("illegal randomized value in test");
case 0 -> evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000)));
case 1 -> evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000)));
case 2 -> evalQueryQuality.setMetricDetails(
new DiscountedCumulativeGain.Detail(
randomDoubleBetween(0, 1, true),
randomBoolean() ? randomDoubleBetween(0, 1, true) : 0,
randomInt()
)
);
default -> throw new IllegalArgumentException("illegal randomized value in test");
}
}
evalQueryQuality.addHitsAndRatings(ratedHits);

@ -196,23 +196,19 @@ public class ExpectedReciprocalRankTests extends ESTestCase {
}

private static ExpectedReciprocalRank mutateTestItem(ExpectedReciprocalRank original) {
switch (randomIntBetween(0, 2)) {
case 0:
return new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK());
case 1:
return new ExpectedReciprocalRank(
original.getMaxRelevance(),
randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
original.getK()
);
case 2:
return new ExpectedReciprocalRank(
original.getMaxRelevance(),
original.getUnknownDocRating(),
randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
);
default:
throw new IllegalArgumentException("mutation variant not allowed");
}
return switch (randomIntBetween(0, 2)) {
case 0 -> new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK());
case 1 -> new ExpectedReciprocalRank(
original.getMaxRelevance(),
randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)),
original.getK()
);
case 2 -> new ExpectedReciprocalRank(
original.getMaxRelevance(),
original.getUnknownDocRating(),
randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10))
);
default -> throw new IllegalArgumentException("mutation variant not allowed");
};
}
}

@ -224,32 +224,24 @@ public class PrecisionAtKTests extends ESTestCase {
}

private static PrecisionAtK mutate(PrecisionAtK original) {
PrecisionAtK pAtK;
switch (randomIntBetween(0, 2)) {
case 0:
pAtK = new PrecisionAtK(
original.getRelevantRatingThreshold(),
original.getIgnoreUnlabeled() == false,
original.forcedSearchSize().getAsInt()
);
break;
case 1:
pAtK = new PrecisionAtK(
randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
original.getIgnoreUnlabeled(),
original.forcedSearchSize().getAsInt()
);
break;
case 2:
pAtK = new PrecisionAtK(
original.getRelevantRatingThreshold(),
original.getIgnoreUnlabeled(),
original.forcedSearchSize().getAsInt() + 1
);
break;
default:
throw new IllegalStateException("The test should only allow three parameters mutated");
}
PrecisionAtK pAtK = switch (randomIntBetween(0, 2)) {
case 0 -> new PrecisionAtK(
original.getRelevantRatingThreshold(),
original.getIgnoreUnlabeled() == false,
original.forcedSearchSize().getAsInt()
);
case 1 -> new PrecisionAtK(
randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
original.getIgnoreUnlabeled(),
original.forcedSearchSize().getAsInt()
);
case 2 -> new PrecisionAtK(
original.getRelevantRatingThreshold(),
original.getIgnoreUnlabeled(),
original.forcedSearchSize().getAsInt() + 1
);
default -> throw new IllegalStateException("The test should only allow three parameters mutated");
};
return pAtK;
}

@ -80,17 +80,10 @@ public class RatedDocumentTests extends ESTestCase {
String docId = original.getDocID();

switch (randomIntBetween(0, 2)) {
case 0:
rating = randomValueOtherThan(rating, () -> randomInt());
break;
case 1:
index = randomValueOtherThan(index, () -> randomAlphaOfLength(10));
break;
case 2:
docId = randomValueOtherThan(docId, () -> randomAlphaOfLength(10));
break;
default:
throw new IllegalStateException("The test should only allow two parameters mutated");
case 0 -> rating = randomValueOtherThan(rating, () -> randomInt());
case 1 -> index = randomValueOtherThan(index, () -> randomAlphaOfLength(10));
case 2 -> docId = randomValueOtherThan(docId, () -> randomAlphaOfLength(10));
default -> throw new IllegalStateException("The test should only allow two parameters mutated");
}
return new RatedDocument(index, docId, rating);
}

@ -40,14 +40,14 @@ public class RatedSearchHitTests extends ESTestCase {
OptionalInt rating = original.getRating();
SearchHit hit = original.getSearchHit();
switch (randomIntBetween(0, 1)) {
case 0:
rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
break;
case 1:
hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(), Collections.emptyMap());
break;
default:
throw new IllegalStateException("The test should only allow two parameters mutated");
case 0 -> rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
case 1 -> hit = new SearchHit(
hit.docId(),
hit.getId() + randomAlphaOfLength(10),
Collections.emptyMap(),
Collections.emptyMap()
);
default -> throw new IllegalStateException("The test should only allow two parameters mutated");
}
return new RatedSearchHit(hit, rating);
}

@ -202,20 +202,14 @@ public class RecallAtKTests extends ESTestCase {
}

private static RecallAtK mutate(RecallAtK original) {
RecallAtK recallAtK;
switch (randomIntBetween(0, 1)) {
case 0:
recallAtK = new RecallAtK(
randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
original.forcedSearchSize().getAsInt()
);
break;
case 1:
recallAtK = new RecallAtK(original.getRelevantRatingThreshold(), original.forcedSearchSize().getAsInt() + 1);
break;
default:
throw new IllegalStateException("The test should only allow two parameters mutated");
}
RecallAtK recallAtK = switch (randomIntBetween(0, 1)) {
case 0 -> new RecallAtK(
randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
original.forcedSearchSize().getAsInt()
);
case 1 -> new RecallAtK(original.getRelevantRatingThreshold(), original.forcedSearchSize().getAsInt() + 1);
default -> throw new IllegalStateException("The test should only allow two parameters mutated");
};
return recallAtK;
}

@ -908,17 +908,20 @@ public abstract class AbstractAsyncBulkByScrollAction<

protected RequestWrapper<?> scriptChangedOpType(RequestWrapper<?> request, OpType oldOpType, OpType newOpType) {
switch (newOpType) {
case NOOP:
case NOOP -> {
taskWorker.countNoop();
return null;
case DELETE:
}
case DELETE -> {
RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getId()));
delete.setVersion(request.getVersion());
delete.setVersionType(VersionType.INTERNAL);
delete.setRouting(request.getRouting());
return delete;
default:
throw new IllegalArgumentException("Unsupported operation type change from [" + oldOpType + "] to [" + newOpType + "]");
}
default -> throw new IllegalArgumentException(
"Unsupported operation type change from [" + oldOpType + "] to [" + newOpType + "]"
);
}
}

@ -946,18 +949,14 @@ public abstract class AbstractAsyncBulkByScrollAction<

public static OpType fromString(String opType) {
String lowerOpType = opType.toLowerCase(Locale.ROOT);
switch (lowerOpType) {
case "noop":
return OpType.NOOP;
case "index":
return OpType.INDEX;
case "delete":
return OpType.DELETE;
default:
throw new IllegalArgumentException(
"Operation type [" + lowerOpType + "] not allowed, only " + Arrays.toString(values()) + " are allowed"
);
}
return switch (lowerOpType) {
case "noop" -> OpType.NOOP;
case "index" -> OpType.INDEX;
case "delete" -> OpType.DELETE;
default -> throw new IllegalArgumentException(
"Operation type [" + lowerOpType + "] not allowed, only " + Arrays.toString(values()) + " are allowed"
);
};
}

@Override

@ -336,14 +336,9 @@ public class Reindexer {
return;
}
switch (routingSpec) {
case "keep":
super.copyRouting(request, routing);
break;
case "discard":
super.copyRouting(request, null);
break;
default:
throw new IllegalArgumentException("Unsupported routing command");
case "keep" -> super.copyRouting(request, routing);
case "discard" -> super.copyRouting(request, null);
default -> throw new IllegalArgumentException("Unsupported routing command");
}
}

@ -308,23 +308,22 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
boolean createdResponse;
DocWriteRequest.OpType opType;
switch (randomIntBetween(0, 2)) {
case 0:
case 0 -> {
createdResponse = true;
opType = DocWriteRequest.OpType.CREATE;
created++;
break;
case 1:
}
case 1 -> {
createdResponse = false;
opType = randomFrom(DocWriteRequest.OpType.INDEX, DocWriteRequest.OpType.UPDATE);
updated++;
break;
case 2:
}
case 2 -> {
createdResponse = false;
opType = DocWriteRequest.OpType.DELETE;
deleted++;
break;
default:
throw new RuntimeException("Bad scenario");
}
default -> throw new RuntimeException("Bad scenario");
}
final int seqNo = randomInt(20);
final int primaryTerm = randomIntBetween(1, 16);

@ -1146,15 +1145,18 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
int wraps = randomIntBetween(0, 4);
for (int i = 0; i < wraps; i++) {
switch (randomIntBetween(0, 2)) {
case 0:
case 0 -> {
e = new SearchPhaseExecutionException("test", "test failure", e, new ShardSearchFailure[0]);
continue;
case 1:
}
case 1 -> {
e = new ReduceSearchPhaseException("test", "test failure", e, new ShardSearchFailure[0]);
continue;
case 2:
}
case 2 -> {
e = new ElasticsearchException(e);
continue;
}
}
}
return e;

@ -107,14 +107,11 @@ public class AzureStorageService {
return null;
}

switch (proxy.type()) {
case HTTP:
return new ProxyOptions(ProxyOptions.Type.HTTP, (InetSocketAddress) proxy.address());
case SOCKS:
return new ProxyOptions(ProxyOptions.Type.SOCKS5, (InetSocketAddress) proxy.address());
default:
return null;
}
return switch (proxy.type()) {
case HTTP -> new ProxyOptions(ProxyOptions.Type.HTTP, (InetSocketAddress) proxy.address());
case SOCKS -> new ProxyOptions(ProxyOptions.Type.SOCKS5, (InetSocketAddress) proxy.address());
default -> null;
};
}

// non-static, package private for testing

@ -138,22 +135,11 @@ public class AzureStorageService {
throw new IllegalArgumentException("Unable to use " + locationMode + " location mode without a secondary location URI");
}

final String secondaryHost;
switch (locationMode) {
case PRIMARY_ONLY:
case SECONDARY_ONLY:
secondaryHost = null;
break;
case PRIMARY_THEN_SECONDARY:
secondaryHost = secondaryUri;
break;
case SECONDARY_THEN_PRIMARY:
secondaryHost = primaryUri;
break;
default:
assert false;
throw new AssertionError("Impossible to get here");
}
final String secondaryHost = switch (locationMode) {
case PRIMARY_ONLY, SECONDARY_ONLY -> null;
case PRIMARY_THEN_SECONDARY -> secondaryUri;
case SECONDARY_THEN_PRIMARY -> primaryUri;
};

// The request retry policy uses seconds as the default time unit, since
// it's possible to configure a timeout < 1s we should ceil that value

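The last hunk above drops the unreachable default branch entirely: a switch expression over an enum that names every constant is checked for exhaustiveness by the compiler. A small self-contained sketch of that property (hypothetical enum and inputs, not code from this commit):

public class ExhaustiveEnumSketch {
    enum LocationMode { PRIMARY_ONLY, SECONDARY_ONLY, PRIMARY_THEN_SECONDARY, SECONDARY_THEN_PRIMARY }

    // No default branch: the compiler rejects this switch expression if any constant is missing.
    static String secondaryHost(LocationMode mode, String primaryUri, String secondaryUri) {
        return switch (mode) {
            case PRIMARY_ONLY, SECONDARY_ONLY -> null;   // grouped labels share one arm
            case PRIMARY_THEN_SECONDARY -> secondaryUri;
            case SECONDARY_THEN_PRIMARY -> primaryUri;
        };
    }

    public static void main(String[] args) {
        System.out.println(secondaryHost(LocationMode.SECONDARY_THEN_PRIMARY, "primary", "secondary")); // prints "primary"
    }
}
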
Some files were not shown because too many files have changed in this diff