Mirror of https://github.com/elastic/elasticsearch.git, synced 2025-06-28 09:28:55 -04:00
Cleanup Stream usage in various spots (#97306)
Cleans up lots of spots where we did odd things with streams: redundant stream creation, redundantly collecting into a list only to add all of the collected elements to another collection, redundant streams used just for joining strings, use of the less efficient `Collectors.toList` where `Stream.toList` works, and a few cases that incorrectly relied on the collected result being mutable.
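A minimal sketch of the recurring before/after patterns (illustrative only: the class and method names here are made up for this summary, not taken from the diff below):

```java
// Hypothetical examples of the cleanups described above.
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class StreamCleanupSketch {

    // Redundant stream creation: no need to wrap an array in a List just to stream it.
    static Stream<String> firstExistingBefore(String[] paths) {
        return List.of(paths).stream();
    }

    static Stream<String> firstExistingAfter(String[] paths) {
        return Stream.of(paths);
    }

    // Redundant collect before addAll: stream straight into the target collection.
    static void copyBefore(List<String> source, List<String> target) {
        target.addAll(source.stream().map(String::trim).collect(Collectors.toList()));
    }

    static void copyAfter(List<String> source, List<String> target) {
        source.stream().map(String::trim).forEach(target::add);
    }

    // Redundant stream just to join strings.
    static String joinBefore(List<String> values) {
        return values.stream().collect(Collectors.joining(","));
    }

    static String joinAfter(List<String> values) {
        return String.join(",", values);
    }

    // Collectors.toList() replaced by Stream.toList() when the result is only read.
    static List<String> sortedBefore(List<String> values) {
        return values.stream().sorted().collect(Collectors.toList());
    }

    static List<String> sortedAfter(List<String> values) {
        return values.stream().sorted().toList();
    }
}
```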
This commit is contained in:
parent 5eeaecd9cf
commit 63e64ae61b
117 changed files with 321 additions and 442 deletions
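One subtlety behind the cases "incorrectly relying on the result being mutable": `Stream.toList()` returns an unmodifiable list, whereas `Collectors.toList()` makes no promise either way (in current JDKs it happens to return a mutable `ArrayList`). A tiny, assumed example of why the two are not blindly interchangeable:

```java
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class ToListMutability {
    public static void main(String[] args) {
        List<String> collected = Stream.of("a", "b").collect(Collectors.toList());
        collected.add("c"); // works: currently backed by a mutable ArrayList

        List<String> unmodifiable = Stream.of("a", "b").toList();
        unmodifiable.add("c"); // throws UnsupportedOperationException
    }
}
```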
@@ -50,15 +50,11 @@ import java.io.Writer;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.inject.Inject;
import java.io.Serializable;

import javax.inject.Inject;

/**
* Checks files for license headers..
*/

@@ -193,9 +189,8 @@ public abstract class LicenseHeadersTask extends DefaultTask {
boolean unApprovedLicenses = stats.getNumUnApproved() > 0;
if (unknownLicenses || unApprovedLicenses) {
getLogger().error("The following files contain unapproved license headers:");
unapprovedFiles(repFile).stream().forEachOrdered(unapprovedFile -> getLogger().error(unapprovedFile));
throw new GradleException("Check failed. License header problems were found. Full details: " +
repFile.getAbsolutePath());
unapprovedFiles(repFile).forEach(getLogger()::error);
throw new GradleException("Check failed. License header problems were found. Full details: " + repFile.getAbsolutePath());
}
}

@@ -34,6 +34,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.inject.Inject;

@@ -287,7 +288,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
*/
private Optional<String> getDockerPath() {
// Check if the Docker binary exists
return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
return Stream.of(DOCKER_BINARIES).filter(path -> new File(path).exists()).findFirst();
}

/**

@@ -298,7 +299,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
*/
private Optional<String> getDockerComposePath() {
// Check if the Docker binary exists
return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
return Stream.of(DOCKER_COMPOSE_BINARIES).filter(path -> new File(path).exists()).findFirst();
}

private void throwDockerRequiredException(final String message) {
@@ -114,24 +114,19 @@ public abstract class ForbiddenPatternsTask extends DefaultTask {
} catch (UncheckedIOException e) {
throw new IllegalArgumentException("Failed to read " + f + " as UTF_8", e);
}
List<Integer> invalidLines = IntStream.range(0, lines.size())
.filter(i -> allPatterns.matcher(lines.get(i)).find())
.boxed()
.collect(Collectors.toList());

URI baseUri = getRootDir().orElse(projectLayout.getProjectDirectory().getAsFile()).get().toURI();
String path = baseUri.relativize(f.toURI()).toString();
failures.addAll(
invalidLines.stream()
.map(l -> new AbstractMap.SimpleEntry<>(l + 1, lines.get(l)))
IntStream.range(0, lines.size())
.filter(i -> allPatterns.matcher(lines.get(i)).find())
.mapToObj(l -> new AbstractMap.SimpleEntry<>(l + 1, lines.get(l)))
.flatMap(
kv -> patterns.entrySet()
.stream()
.filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
.map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
)
.collect(Collectors.toList())
);
.forEach(failures::add);
}
if (failures.isEmpty() == false) {
throw new GradleException("Found invalid patterns:\n" + String.join("\n", failures));

@@ -159,8 +159,7 @@ public class JavaModulePrecommitTask extends PrecommitTask {
return ModuleFinder.of(filePath.toPath())
.findAll()
.stream()
.sorted(Comparator.comparing(ModuleReference::descriptor))
.findFirst()
.min(Comparator.comparing(ModuleReference::descriptor))
.orElseThrow(() -> new GradleException("module not found in " + filePath));
}
}
@@ -48,7 +48,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import javax.inject.Inject;

@@ -234,7 +233,7 @@ public class SplitPackagesAuditTask extends DefaultTask {
String lastPackageName = null;
Set<String> currentClasses = null;
boolean filterErrorsFound = false;
for (String fqcn : getParameters().getIgnoreClasses().get().stream().sorted().collect(Collectors.toList())) {
for (String fqcn : getParameters().getIgnoreClasses().get().stream().sorted().toList()) {
int lastDot = fqcn.lastIndexOf('.');
if (lastDot == -1) {
LOGGER.error("Missing package in classname in split package ignores: " + fqcn);

@@ -38,6 +38,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import javax.inject.Inject;

@@ -128,7 +129,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
var mismatchingBaseClasses = testClassesCandidate.stream()
.filter(testClassDefaultPredicate)
.filter(TestingConventionsCheckWorkAction::seemsLikeATest)
.collect(Collectors.toList());
.toList();
if (mismatchingBaseClasses.isEmpty() == false) {
throw new GradleException(
"Following test classes do not extend any supported base class:\n\t"

@@ -141,7 +142,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
// ensure base class matching do match suffix
var matchingBaseClassNotMatchingSuffix = matchingBaseClass.stream()
.filter(c -> suffixes.stream().allMatch(s -> c.getName().endsWith(s) == false))
.collect(Collectors.toList());
.toList();
if (matchingBaseClassNotMatchingSuffix.isEmpty() == false) {
throw new GradleException(
"Following test classes do not match naming convention to use suffix "

@@ -202,8 +203,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
}

private static boolean isAnnotated(Method method, Class<?> annotation) {
return List.of(method.getAnnotations())
.stream()
return Stream.of(method.getAnnotations())
.anyMatch(presentAnnotation -> annotation.isAssignableFrom(presentAnnotation.getClass()));
}
@@ -389,8 +389,7 @@ public class DistroTestPlugin implements Plugin<Project> {
List<ElasticsearchDistribution> currentDistros = new ArrayList<>();

for (Architecture architecture : Architecture.values()) {
ALL_INTERNAL.stream()
.forEach(
ALL_INTERNAL.forEach(
type -> currentDistros.add(
createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch())
)

@@ -38,13 +38,13 @@ public class RestTestTransformer {
List<RestTestTransformGlobalSetup> setupTransforms = transformations.stream()
.filter(transform -> transform instanceof RestTestTransformGlobalSetup)
.map(transform -> (RestTestTransformGlobalSetup) transform)
.collect(Collectors.toList());
.toList();

// Collect any global teardown transformations
List<RestTestTransformGlobalTeardown> teardownTransforms = transformations.stream()
.filter(transform -> transform instanceof RestTestTransformGlobalTeardown)
.map(transform -> (RestTestTransformGlobalTeardown) transform)
.collect(Collectors.toList());
.toList();

// Collect any transformations that are identified by an object key.
Map<String, List<RestTestTransformByParentObject>> objectKeyFinders = transformations.stream()

@@ -75,9 +75,8 @@ public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToo
return Optional.of(
Lists.newArrayList(versionsNode.iterator())
.stream()
.map(node -> toVersionInfo(node))
.sorted(Comparator.comparing(AdoptiumVersionInfo::semver).reversed())
.findFirst()
.map(this::toVersionInfo)
.max(Comparator.comparing(AdoptiumVersionInfo::semver))
.get()
);
} catch (FileNotFoundException e) {

@@ -89,7 +89,7 @@ public class VagrantBasePlugin implements Plugin<Project> {
}

String version = matcher.group(1);
List<Integer> versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).collect(Collectors.toList());
List<Integer> versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).toList();
for (int i = 0; i < minVersion.length; ++i) {
int found = versionParts.get(i);
if (found > minVersion[i]) {
@@ -17,7 +17,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;

import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertLinuxPath;
import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertWindowsPath;

@@ -71,7 +70,7 @@ public abstract class VagrantShellTask extends DefaultTask {
script.add("try {");
script.add("cd " + convertWindowsPath(buildLayout.getRootDirectory(), buildLayout.getRootDirectory().toString()));
extension.getVmEnv().forEach((k, v) -> script.add("$Env:" + k + " = \"" + v + "\""));
script.addAll(getWindowsScript().stream().map(s -> " " + s).collect(Collectors.toList()));
script.addAll(getWindowsScript().stream().map(s -> " " + s).toList());
script.addAll(
Arrays.asList(
" exit $LASTEXITCODE",

@@ -16,7 +16,6 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**

@@ -62,7 +61,7 @@ public class Reaper implements Closeable {

private void reap() {
try (Stream<Path> stream = Files.list(inputDir)) {
final List<Path> inputFiles = stream.filter(p -> p.getFileName().toString().endsWith(".cmd")).collect(Collectors.toList());
final List<Path> inputFiles = stream.filter(p -> p.getFileName().toString().endsWith(".cmd")).toList();

for (Path inputFile : inputFiles) {
System.out.println("Process file: " + inputFile);

@@ -56,7 +56,7 @@ public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implemen

@Override
public <T1> T1[] toArray(T1[] a) {
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).toArray(a);
return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toList().toArray(a);
}

@Override

@@ -79,7 +79,7 @@ public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implemen

@Override
public boolean containsAll(Collection<?> c) {
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).containsAll(c);
return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toSet()).containsAll(c);
}

@Override
@@ -136,7 +136,7 @@ public class RunTask extends DefaultTestClustersTask {
entry -> entry.getValue().toString()
)
);
boolean singleNode = getClusters().stream().flatMap(c -> c.getNodes().stream()).count() == 1;
boolean singleNode = getClusters().stream().mapToLong(c -> c.getNodes().size()).sum() == 1;
final Function<ElasticsearchNode, Path> getDataPath;
if (singleNode) {
getDataPath = n -> dataDir;

@@ -26,7 +26,7 @@ public interface TestClustersAware extends Task {
}

cluster.getNodes()
.all(node -> node.getDistributions().stream().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
.all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
cluster.getNodes().all(node -> dependsOn((Callable<Collection<Configuration>>) node::getPluginAndModuleConfigurations));
getClusters().add(cluster);
}

@@ -18,7 +18,6 @@ import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public abstract class AbstractBenchmark<T extends Closeable> {
private static final int SEARCH_BENCHMARK_ITERATIONS = 10_000;

@@ -92,7 +91,7 @@ public abstract class AbstractBenchmark<T extends Closeable> {
String benchmarkTargetHost = args[1];
String indexName = args[2];
String searchBody = args[3];
List<Integer> throughputRates = Arrays.asList(args[4].split(",")).stream().map(Integer::valueOf).collect(Collectors.toList());
List<Integer> throughputRates = Arrays.stream(args[4].split(",")).map(Integer::valueOf).toList();

T client = client(benchmarkTargetHost);

@@ -33,7 +33,6 @@ import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Locale;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPOutputStream;

@@ -74,7 +73,7 @@ public class GeoIpCli extends Command {
return;
}
try (Stream<Path> files = Files.list(source)) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).collect(Collectors.toList())) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
Files.copy(path, target.resolve(path.getFileName()), StandardCopyOption.REPLACE_EXISTING);
}
}

@@ -82,7 +81,7 @@ public class GeoIpCli extends Command {

private void packDatabasesToTgz(Terminal terminal, Path source, Path target) throws IOException {
try (Stream<Path> files = Files.list(source)) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".mmdb")).collect(Collectors.toList())) {
for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".mmdb")).toList()) {
String fileName = path.getFileName().toString();
Path compressedPath = target.resolve(fileName.replaceAll("mmdb$", "") + "tgz");
terminal.println("Found " + fileName + ", will compress it to " + compressedPath.getFileName());

@@ -111,7 +110,7 @@ public class GeoIpCli extends Command {
XContentGenerator generator = XContentType.JSON.xContent().createGenerator(os)
) {
generator.writeStartArray();
for (Path db : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).collect(Collectors.toList())) {
for (Path db : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
terminal.println("Adding " + db.getFileName() + " to overview.json");
MessageDigest md5 = MessageDigests.md5();
try (InputStream dis = new DigestInputStream(new BufferedInputStream(Files.newInputStream(db)), md5)) {
@@ -10,11 +10,12 @@ package org.elasticsearch.server.cli;

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

final class SystemJvmOptions {

static List<String> systemJvmOptions() {
return List.of(
return Stream.of(
/*
* Cache ttl in seconds for positive DNS lookups noting that this overrides the JDK security property networkaddress.cache.ttl;
* can be set to -1 to cache forever.

@@ -61,7 +62,7 @@ final class SystemJvmOptions {
*/
"--add-opens=java.base/java.io=org.elasticsearch.preallocate",
maybeOverrideDockerCgroup()
).stream().filter(e -> e.isEmpty() == false).collect(Collectors.toList());
).filter(e -> e.isEmpty() == false).collect(Collectors.toList());
}

/*

@@ -108,7 +108,7 @@ public final class EmbeddedImplClassLoader extends SecureClassLoader {
.collect(toUnmodifiableMap(k -> k.getKey().prefix(), Map.Entry::getValue));
Map<String, JarMeta> map = new HashMap<>();
for (var jarMeta : prefixToCodeBase.keySet()) {
jarMeta.packages().stream().forEach(pkg -> {
jarMeta.packages().forEach(pkg -> {
var prev = map.put(pkg, jarMeta);
assert prev == null;
});

@@ -72,8 +72,8 @@ class InMemoryModuleFinder implements ModuleFinder {
md.requires().stream().filter(req -> missingModules.contains(req.name()) == false).forEach(builder::requires);
md.exports().forEach(builder::exports);
md.opens().forEach(builder::opens);
md.provides().stream().forEach(builder::provides);
md.uses().stream().forEach(builder::uses);
md.provides().forEach(builder::provides);
md.uses().forEach(builder::uses);
builder.packages(md.packages());
return builder.build();
}

@@ -129,7 +129,6 @@ public final class DissectParser {
Set<String> appendKeyNames = dissectPairs.stream()
.filter(dissectPair -> APPEND_MODIFIERS.contains(dissectPair.key().getModifier()))
.map(KEY_NAME)
.distinct()
.collect(Collectors.toSet());
if (appendKeyNames.size() > 0) {
List<DissectPair> modifiedMatchPairs = new ArrayList<>(dissectPairs.size());
@@ -11,7 +11,6 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;

import javax.net.ssl.SSLParameters;

@@ -82,7 +81,7 @@ public enum SslClientAuthenticationMode {
public static SslClientAuthenticationMode parse(String value) {
final SslClientAuthenticationMode mode = LOOKUP.get(value.toLowerCase(Locale.ROOT));
if (mode == null) {
final String allowedValues = LOOKUP.keySet().stream().collect(Collectors.joining(","));
final String allowedValues = String.join(",", LOOKUP.keySet());
throw new SslConfigException(
"could not resolve ssl client authentication, unknown value [" + value + "], recognised values are [" + allowedValues + "]"
);

@@ -236,9 +236,7 @@ public class SslDiagnostics {
if (hostnames.isEmpty()) {
message.append("; the certificate does not have any DNS/IP subject alternative names");
} else {
message.append("; the certificate has subject alternative names [")
.append(hostnames.stream().collect(Collectors.joining(",")))
.append("]");
message.append("; the certificate has subject alternative names [").append(String.join(",", hostnames)).append("]");
}
}
} catch (CertificateParsingException e) {

@@ -11,7 +11,6 @@ import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;

/**
* Represents the verification mode to be used for SSL connections.

@@ -84,7 +83,7 @@ public enum SslVerificationMode {
public static SslVerificationMode parse(String value) {
final SslVerificationMode mode = LOOKUP.get(value.toLowerCase(Locale.ROOT));
if (mode == null) {
final String allowedValues = LOOKUP.keySet().stream().collect(Collectors.joining(","));
final String allowedValues = String.join(",", LOOKUP.keySet());
throw new SslConfigException(
"could not resolve ssl client verification mode, unknown value ["
+ value
@@ -1072,7 +1072,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes

private Map<String, Integer> bucketCountsAsMap(InternalAutoDateHistogram result) {
Map<String, Integer> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
result.getBuckets().stream().forEach(b -> {
result.getBuckets().forEach(b -> {
Object old = map.put(b.getKeyAsString(), Math.toIntExact(b.getDocCount()));
assertNull(old);
});

@@ -1081,7 +1081,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes

private Map<String, Double> maxAsMap(InternalAutoDateHistogram result) {
Map<String, Double> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
result.getBuckets().stream().forEach(b -> {
result.getBuckets().forEach(b -> {
Max max = b.getAggregations().get("max");
Object old = map.put(b.getKeyAsString(), max.value());
assertNull(old);

@@ -670,14 +670,10 @@ public class UserTreeToXContent extends UserTreeBaseVisitor<ScriptScope> {
if (decorations.isEmpty() == false) {
builder.startArray(Fields.DECORATIONS);

List<Class<? extends Decoration>> dkeys = decorations.keySet()
decorations.keySet()
.stream()
.sorted(Comparator.comparing(Class::getName))
.collect(Collectors.toList());

for (Class<? extends Decoration> dkey : dkeys) {
DecorationToXContent.ToXContent(decorations.get(dkey), builder);
}
.forEachOrdered(dkey -> DecorationToXContent.ToXContent(decorations.get(dkey), builder));
builder.endArray();
}
}

@@ -45,7 +45,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
* A {@link FieldMapper} that creates hierarchical joins (parent-join) between documents in the same index.

@@ -324,7 +323,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
.map(mappingLookup::getFieldType)
.filter(ft -> ft instanceof JoinFieldType)
.map(MappedFieldType::name)
.collect(Collectors.toList());
.toList();
if (joinFields.size() > 1) {
throw new IllegalArgumentException("Only one [parent-join] field can be defined per index, got " + joinFields);
}
@@ -125,8 +125,7 @@ final class QueryAnalyzer {
partialResults.addAll(terms);
}
if (children.isEmpty() == false) {
List<Result> childResults = children.stream().map(ResultBuilder::getResult).collect(Collectors.toList());
partialResults.addAll(childResults);
children.stream().map(ResultBuilder::getResult).forEach(partialResults::add);
}
if (partialResults.isEmpty()) {
return verified ? Result.MATCH_NONE : Result.UNKNOWN;

@@ -243,7 +242,7 @@ final class QueryAnalyzer {
}

private static Result handleConjunction(List<Result> conjunctionsWithUnknowns) {
List<Result> conjunctions = conjunctionsWithUnknowns.stream().filter(r -> r.isUnknown() == false).collect(Collectors.toList());
List<Result> conjunctions = conjunctionsWithUnknowns.stream().filter(r -> r.isUnknown() == false).toList();
if (conjunctions.isEmpty()) {
if (conjunctionsWithUnknowns.isEmpty()) {
throw new IllegalArgumentException("Must have at least one conjunction sub result");

@@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.OptionalInt;

@@ -133,8 +132,10 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
double idcg = 0;

if (normalize) {
List<Integer> allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed().collect(Collectors.toList());
Collections.sort(allRatings, Comparator.nullsLast(Collections.reverseOrder()));
List<Integer> allRatings = ratedDocs.stream()
.map(RatedDocument::getRating)
.sorted(Collections.reverseOrder())
.collect(Collectors.toList());
idcg = computeDCG(allRatings.subList(0, Math.min(ratingsInSearchHits.size(), allRatings.size())));
if (idcg != 0) {
result = dcg / idcg;

@@ -126,7 +126,7 @@ public class TransportNodesReloadSecureSettingsAction extends TransportNodesActi
final Settings settingsWithKeystore = Settings.builder().put(environment.settings(), false).setSecureSettings(keystore).build();
final List<Exception> exceptions = new ArrayList<>();
// broadcast the new settings object (with the open embedded keystore) to all reloadable plugins
pluginsService.filterPlugins(ReloadablePlugin.class).stream().forEach(p -> {
pluginsService.filterPlugins(ReloadablePlugin.class).forEach(p -> {
try {
p.reload(settingsWithKeystore);
} catch (final Exception e) {
@ -24,7 +24,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -166,10 +165,10 @@ public class ReservedComposableIndexTemplateAction
|
|||
}
|
||||
|
||||
Set<String> composableEntities = composables.stream().map(r -> reservedComposableIndexName(r.name())).collect(Collectors.toSet());
|
||||
Set<String> composablesToDelete = new HashSet<>(
|
||||
prevState.keys().stream().filter(k -> k.startsWith(COMPOSABLE_PREFIX)).collect(Collectors.toSet())
|
||||
);
|
||||
composablesToDelete.removeAll(composableEntities);
|
||||
Set<String> composablesToDelete = prevState.keys()
|
||||
.stream()
|
||||
.filter(k -> k.startsWith(COMPOSABLE_PREFIX) && composableEntities.contains(k) == false)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
// 3. delete composable index templates (this will fail on attached data streams, unless we added a higher priority one)
|
||||
if (composablesToDelete.isEmpty() == false) {
|
||||
|
@ -183,9 +182,7 @@ public class ReservedComposableIndexTemplateAction
|
|||
}
|
||||
|
||||
Set<String> componentEntities = components.stream().map(r -> reservedComponentName(r.name())).collect(Collectors.toSet());
|
||||
Set<String> componentsToDelete = new HashSet<>(
|
||||
prevState.keys().stream().filter(k -> k.startsWith(COMPONENT_PREFIX)).collect(Collectors.toSet())
|
||||
);
|
||||
Set<String> componentsToDelete = prevState.keys().stream().filter(k -> k.startsWith(COMPONENT_PREFIX)).collect(Collectors.toSet());
|
||||
componentsToDelete.removeAll(componentEntities);
|
||||
|
||||
// 5. delete component templates (this will check if there are any related composable index templates and fail)
|
||||
|
|
|
@ -17,8 +17,6 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
import static java.util.stream.Collectors.toList;
|
||||
|
||||
public class SuggestingErrorOnUnknown implements ErrorOnUnknown {
|
||||
@Override
|
||||
public String errorMessage(String parserName, String unknownField, Iterable<String> candidates) {
|
||||
|
@ -55,7 +53,7 @@ public class SuggestingErrorOnUnknown implements ErrorOnUnknown {
|
|||
}
|
||||
return a.v2().compareTo(b.v2());
|
||||
});
|
||||
List<String> keys = scored.stream().map(Tuple::v2).collect(toList());
|
||||
List<String> keys = scored.stream().map(Tuple::v2).toList();
|
||||
StringBuilder builder = new StringBuilder(" did you mean ");
|
||||
if (keys.size() == 1) {
|
||||
builder.append("[").append(keys.get(0)).append("]");
|
||||
|
|
|
@ -97,7 +97,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
|||
* @param values The terms
|
||||
*/
|
||||
public TermsQueryBuilder(String fieldName, int... values) {
|
||||
this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
|
||||
this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -107,7 +107,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
|||
* @param values The terms
|
||||
*/
|
||||
public TermsQueryBuilder(String fieldName, long... values) {
|
||||
this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
|
||||
this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -117,7 +117,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
|||
* @param values The terms
|
||||
*/
|
||||
public TermsQueryBuilder(String fieldName, float... values) {
|
||||
this(fieldName, values != null ? IntStream.range(0, values.length).mapToObj(i -> values[i]).toList() : (Iterable<?>) null);
|
||||
this(fieldName, values != null ? IntStream.range(0, values.length).mapToObj(i -> values[i]).toList() : null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -127,7 +127,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
|||
* @param values The terms
|
||||
*/
|
||||
public TermsQueryBuilder(String fieldName, double... values) {
|
||||
this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
|
||||
this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -137,7 +137,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
|||
* @param values The terms
|
||||
*/
|
||||
public TermsQueryBuilder(String fieldName, Object... values) {
|
||||
this(fieldName, values != null ? Arrays.asList(values) : (Iterable<?>) null);
|
||||
this(fieldName, values != null ? Arrays.asList(values) : null);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -110,7 +110,7 @@ public class ShardFieldUsageTracker {
|
|||
|
||||
@Override
|
||||
public void close() {
|
||||
usages.entrySet().stream().forEach(e -> {
|
||||
usages.entrySet().forEach(e -> {
|
||||
InternalFieldStats fieldStats = perFieldStats.computeIfAbsent(e.getKey(), f -> new InternalFieldStats());
|
||||
PerField pf = e.getValue();
|
||||
boolean any = false;
|
||||
|
|
|
@ -59,7 +59,6 @@ import java.util.function.LongConsumer;
|
|||
import java.util.function.LongSupplier;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import static org.elasticsearch.core.Strings.format;
|
||||
|
@ -375,7 +374,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
|
|||
return false;
|
||||
}
|
||||
}). // find all inner callers including Translog subclasses
|
||||
collect(Collectors.toList());
|
||||
toList();
|
||||
// the list of inner callers should be either empty or should contain closeOnTragicEvent method
|
||||
return frames.isEmpty() || frames.stream().anyMatch(f -> f.getMethodName().equals("closeOnTragicEvent"));
|
||||
}
|
||||
|
|
|
@ -73,8 +73,8 @@ public class ModuleSupport {
|
|||
Predicate<String> isPackageInParentLayers
|
||||
) throws IOException {
|
||||
var builder = ModuleDescriptor.newOpenModule(name); // open module, for now
|
||||
requires.stream().forEach(builder::requires);
|
||||
uses.stream().forEach(builder::uses);
|
||||
requires.forEach(builder::requires);
|
||||
uses.forEach(builder::uses);
|
||||
|
||||
// scan the names of the entries in the JARs
|
||||
Set<String> pkgs = new HashSet<>();
|
||||
|
|
|
@ -30,8 +30,6 @@ import java.util.List;
|
|||
import java.util.Objects;
|
||||
import java.util.function.ToLongFunction;
|
||||
|
||||
import static java.util.stream.Collectors.toList;
|
||||
|
||||
/**
|
||||
* Implementations for {@link Bucket} ordering strategies.
|
||||
*/
|
||||
|
@ -187,7 +185,7 @@ public abstract class InternalOrder extends BucketOrder {
|
|||
public <T extends Bucket> Comparator<T> partiallyBuiltBucketComparator(ToLongFunction<T> ordinalReader, Aggregator aggregator) {
|
||||
List<Comparator<T>> comparators = orderElements.stream()
|
||||
.map(oe -> oe.partiallyBuiltBucketComparator(ordinalReader, aggregator))
|
||||
.collect(toList());
|
||||
.toList();
|
||||
return (lhs, rhs) -> {
|
||||
for (Comparator<T> c : comparators) {
|
||||
int result = c.compare(lhs, rhs);
|
||||
|
@ -201,7 +199,7 @@ public abstract class InternalOrder extends BucketOrder {
|
|||
|
||||
@Override
|
||||
public Comparator<Bucket> comparator() {
|
||||
List<Comparator<Bucket>> comparators = orderElements.stream().map(BucketOrder::comparator).collect(toList());
|
||||
List<Comparator<Bucket>> comparators = orderElements.stream().map(BucketOrder::comparator).toList();
|
||||
return (lhs, rhs) -> {
|
||||
for (Comparator<Bucket> c : comparators) {
|
||||
int result = c.compare(lhs, rhs);
|
||||
|
@ -217,7 +215,7 @@ public abstract class InternalOrder extends BucketOrder {
|
|||
Comparator<DelayedBucket<? extends Bucket>> delayedBucketComparator() {
|
||||
List<Comparator<DelayedBucket<? extends Bucket>>> comparators = orderElements.stream()
|
||||
.map(BucketOrder::delayedBucketComparator)
|
||||
.collect(toList());
|
||||
.toList();
|
||||
return (lhs, rhs) -> {
|
||||
for (Comparator<DelayedBucket<? extends Bucket>> c : comparators) {
|
||||
int result = c.compare(lhs, rhs);
|
||||
|
|
|
@ -21,7 +21,7 @@ import java.util.Comparator;
|
|||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.util.stream.Collectors.toSet;
|
||||
import static java.util.stream.Collectors.toUnmodifiableSet;
|
||||
|
||||
public final class QueryRescorer implements Rescorer {
|
||||
|
||||
|
@ -57,9 +57,7 @@ public final class QueryRescorer implements Rescorer {
|
|||
TopDocs topNFirstPass = topN(topDocs, rescoreContext.getWindowSize());
|
||||
|
||||
// Save doc IDs for which rescoring was applied to be used in score explanation
|
||||
Set<Integer> topNDocIDs = Collections.unmodifiableSet(
|
||||
Arrays.stream(topNFirstPass.scoreDocs).map(scoreDoc -> scoreDoc.doc).collect(toSet())
|
||||
);
|
||||
Set<Integer> topNDocIDs = Arrays.stream(topNFirstPass.scoreDocs).map(scoreDoc -> scoreDoc.doc).collect(toUnmodifiableSet());
|
||||
rescoreContext.setRescoredDocs(topNDocIDs);
|
||||
|
||||
// Rescore them:
|
||||
|
|
|
@ -250,13 +250,13 @@ public class DiscoveryNodesTests extends ESTestCase {
|
|||
DiscoveryNode masterB = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesB);
|
||||
|
||||
DiscoveryNodes.Builder builderA = DiscoveryNodes.builder();
|
||||
nodesA.stream().forEach(builderA::add);
|
||||
nodesA.forEach(builderA::add);
|
||||
final String masterAId = masterA == null ? null : masterA.getId();
|
||||
builderA.masterNodeId(masterAId);
|
||||
builderA.localNodeId(RandomPicks.randomFrom(random(), nodesA).getId());
|
||||
|
||||
DiscoveryNodes.Builder builderB = DiscoveryNodes.builder();
|
||||
nodesB.stream().forEach(builderB::add);
|
||||
nodesB.forEach(builderB::add);
|
||||
final String masterBId = masterB == null ? null : masterB.getId();
|
||||
builderB.masterNodeId(masterBId);
|
||||
builderB.localNodeId(RandomPicks.randomFrom(random(), nodesB).getId());
|
||||
|
|
|
@ -158,9 +158,9 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
|
|||
// Pick a random subset of primaries to fail
|
||||
List<FailedShard> shardsToFail = new ArrayList<>();
|
||||
List<ShardRouting> failedPrimaries = randomSubsetOf(primaries);
|
||||
failedPrimaries.stream().forEach(sr -> {
|
||||
shardsToFail.add(new FailedShard(randomFrom(sr), "failed primary", new Exception(), randomBoolean()));
|
||||
});
|
||||
failedPrimaries.forEach(
|
||||
sr -> shardsToFail.add(new FailedShard(randomFrom(sr), "failed primary", new Exception(), randomBoolean()))
|
||||
);
|
||||
|
||||
logger.info("--> state before failing shards: {}", state);
|
||||
state = cluster.applyFailedShards(state, shardsToFail);
|
||||
|
@ -173,7 +173,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
|
|||
Version newPrimaryVersion = getNodeVersion(newPrimary, compareState);
|
||||
|
||||
logger.info("--> new primary is on version {}: {}", newPrimaryVersion, newPrimary);
|
||||
compareState.routingTable().shardRoutingTable(newPrimary.shardId()).shardsWithState(STARTED).stream().forEach(sr -> {
|
||||
compareState.routingTable().shardRoutingTable(newPrimary.shardId()).shardsWithState(STARTED).forEach(sr -> {
|
||||
Version candidateVer = getNodeVersion(sr, compareState);
|
||||
if (candidateVer != null) {
|
||||
logger.info("--> candidate on {} node; shard routing: {}", candidateVer, sr);
|
||||
|
|
|
@ -82,7 +82,7 @@ public class RangeAggregationBuilderTests extends AbstractXContentSerializingTes
|
|||
default -> fail();
|
||||
}
|
||||
RangeAggregationBuilder mutant = new RangeAggregationBuilder(name).keyed(keyed).field(field);
|
||||
ranges.stream().forEach(mutant::addRange);
|
||||
ranges.forEach(mutant::addRange);
|
||||
return mutant;
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,6 @@ import java.util.Set;
|
|||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.repositories.azure.AzureFixtureHelper.assertValidBlockId;
|
||||
|
||||
|
@ -77,7 +76,7 @@ public class AzureHttpHandler implements HttpHandler {
|
|||
final List<String> blockIds = Arrays.stream(blockList.split("<Latest>"))
|
||||
.filter(line -> line.contains("</Latest>"))
|
||||
.map(line -> line.substring(0, line.indexOf("</Latest>")))
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
final ByteArrayOutputStream blob = new ByteArrayOutputStream();
|
||||
for (String blockId : blockIds) {
|
||||
|
|
|
@ -187,7 +187,7 @@ public class CoordinationStateTestCluster {
|
|||
this.electionStrategy = electionStrategy;
|
||||
messages = new ArrayList<>();
|
||||
|
||||
clusterNodes = nodes.stream().map(node -> new ClusterNode(node, electionStrategy)).collect(Collectors.toList());
|
||||
clusterNodes = nodes.stream().map(node -> new ClusterNode(node, electionStrategy)).toList();
|
||||
|
||||
initialConfiguration = randomVotingConfig();
|
||||
initialValue = randomLong();
|
||||
|
@ -200,7 +200,7 @@ public class CoordinationStateTestCluster {
|
|||
}
|
||||
|
||||
void broadcast(DiscoveryNode sourceNode, Object payload) {
|
||||
messages.addAll(clusterNodes.stream().map(cn -> new Message(sourceNode, cn.localNode, payload)).collect(Collectors.toList()));
|
||||
clusterNodes.stream().map(cn -> new Message(sourceNode, cn.localNode, payload)).forEach(messages::add);
|
||||
}
|
||||
|
||||
Optional<ClusterNode> getNode(DiscoveryNode node) {
|
||||
|
@ -251,9 +251,7 @@ public class CoordinationStateTestCluster {
|
|||
} else if (rarely() && rarely()) {
|
||||
randomFrom(clusterNodes).reboot();
|
||||
} else if (rarely()) {
|
||||
final List<ClusterNode> masterNodes = clusterNodes.stream()
|
||||
.filter(cn -> cn.state.electionWon())
|
||||
.collect(Collectors.toList());
|
||||
final List<ClusterNode> masterNodes = clusterNodes.stream().filter(cn -> cn.state.electionWon()).toList();
|
||||
if (masterNodes.isEmpty() == false) {
|
||||
final ClusterNode clusterNode = randomFrom(masterNodes);
|
||||
final long term = rarely() ? randomLongBetween(0, maxTerm + 1) : clusterNode.state.getCurrentTerm();
|
||||
|
|
|
@ -187,11 +187,7 @@ public final class BlobStoreTestUtil {
|
|||
}
|
||||
|
||||
private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryData repositoryData) throws IOException {
|
||||
final List<String> expectedIndexUUIDs = repositoryData.getIndices()
|
||||
.values()
|
||||
.stream()
|
||||
.map(IndexId::getId)
|
||||
.collect(Collectors.toList());
|
||||
final List<String> expectedIndexUUIDs = repositoryData.getIndices().values().stream().map(IndexId::getId).toList();
|
||||
final BlobContainer indicesContainer = repository.blobContainer().children().get("indices");
|
||||
final List<String> foundIndexUUIDs;
|
||||
if (indicesContainer == null) {
|
||||
|
@ -231,7 +227,7 @@ public final class BlobStoreTestUtil {
|
|||
) throws IOException {
|
||||
final BlobContainer repoRoot = repository.blobContainer();
|
||||
final Collection<SnapshotId> snapshotIds = repositoryData.getSnapshotIds();
|
||||
final List<String> expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).collect(Collectors.toList());
|
||||
final List<String> expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).toList();
|
||||
for (String prefix : new String[] { BlobStoreRepository.SNAPSHOT_PREFIX, BlobStoreRepository.METADATA_PREFIX }) {
|
||||
final Collection<String> foundSnapshotUUIDs = repoRoot.listBlobs()
|
||||
.keySet()
|
||||
|
|
|
@ -60,7 +60,6 @@ import java.util.concurrent.ConcurrentHashMap;
|
|||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
public class MockRepository extends FsRepository {
|
||||
private static final Logger logger = LogManager.getLogger(MockRepository.class);
|
||||
|
@ -517,7 +516,7 @@ public class MockRepository extends FsRepository {
|
|||
final Map<String, BlobMetadata> blobs = listBlobs();
|
||||
long deleteBlobCount = blobs.size();
|
||||
long deleteByteCount = 0L;
|
||||
for (String blob : blobs.values().stream().map(BlobMetadata::name).collect(Collectors.toList())) {
|
||||
for (String blob : blobs.values().stream().map(BlobMetadata::name).toList()) {
|
||||
maybeIOExceptionOrBlock(blob);
|
||||
deleteBlobsIgnoringIfNotExists(Iterators.single(blob));
|
||||
deleteByteCount += blobs.get(blob).length();
|
||||
|
|
|
@ -219,7 +219,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
|
|||
protected List<String> filteredWarnings() {
|
||||
return Stream.concat(
|
||||
super.filteredWarnings().stream(),
|
||||
List.of("[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release.").stream()
|
||||
Stream.of("[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release.")
|
||||
).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
|
|
@ -192,7 +192,7 @@ public class VersionUtils {
|
|||
|
||||
/** Returns a random {@link Version} from all available versions, that is compatible with the given version. */
|
||||
public static Version randomCompatibleVersion(Random random, Version version) {
|
||||
final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).collect(Collectors.toList());
|
||||
final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).toList();
|
||||
return compatible.get(random.nextInt(compatible.size()));
|
||||
}
|
||||
|
||||
|
@ -228,10 +228,7 @@ public class VersionUtils {
|
|||
|
||||
/** Returns the maximum {@link Version} that is compatible with the given version. */
|
||||
public static Version maxCompatibleVersion(Version version) {
|
||||
final List<Version> compatible = ALL_VERSIONS.stream()
|
||||
.filter(version::isCompatible)
|
||||
.filter(version::onOrBefore)
|
||||
.collect(Collectors.toList());
|
||||
final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).filter(version::onOrBefore).toList();
|
||||
assert compatible.size() > 0;
|
||||
return compatible.get(compatible.size() - 1);
|
||||
}
|
||||
|
|
|
@ -839,13 +839,13 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
if (hasIlm && false == preserveILMPoliciesUponCompletion()) {
|
||||
Set<String> unexpectedIlmPlicies = getAllUnexpectedIlmPolicies(preserveILMPolicyIds());
|
||||
assertTrue(
|
||||
"Expected no ILM policies after deletions, but found " + unexpectedIlmPlicies.stream().collect(Collectors.joining(", ")),
|
||||
"Expected no ILM policies after deletions, but found " + String.join(", ", unexpectedIlmPlicies),
|
||||
unexpectedIlmPlicies.isEmpty()
|
||||
);
|
||||
}
|
||||
Set<String> unexpectedTemplates = getAllUnexpectedTemplates();
|
||||
assertTrue(
|
||||
"Expected no templates after deletions, but found " + unexpectedTemplates.stream().collect(Collectors.joining(", ")),
|
||||
"Expected no templates after deletions, but found " + String.join(", ", unexpectedTemplates),
|
||||
unexpectedTemplates.isEmpty()
|
||||
);
|
||||
}
|
||||
|
@ -893,12 +893,10 @@ public abstract class ESRestTestCase extends ESTestCase {
|
|||
Request compReq = new Request("GET", "_component_template");
|
||||
String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
|
||||
Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
|
||||
unexpectedTemplates.addAll(
|
||||
((List<?>) cTemplates.get("component_templates")).stream()
|
||||
.map(ct -> (String) ((Map<?, ?>) ct).get("name"))
|
||||
.filter(name -> isXPackTemplate(name) == false)
|
||||
.collect(Collectors.toList())
|
||||
);
|
||||
.forEach(unexpectedTemplates::add);
|
||||
}
|
||||
// Always check for legacy templates:
|
||||
Request getLegacyTemplatesRequest = new Request("GET", "_template");
|
||||
|
|
|
@ -61,7 +61,6 @@ import java.util.Optional;
|
|||
import java.util.Set;
|
||||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
|
||||
|
||||
|
@ -326,7 +325,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
|
|||
}
|
||||
|
||||
filesSet.add(file);
|
||||
List<String> fileNames = filesSet.stream().map(p -> p.getFileName().toString()).collect(Collectors.toList());
|
||||
List<String> fileNames = filesSet.stream().map(p -> p.getFileName().toString()).toList();
|
||||
if (Collections.frequency(fileNames, file.getFileName().toString()) > 1) {
|
||||
Logger logger = LogManager.getLogger(ESClientYamlSuiteTestCase.class);
|
||||
logger.warn(
|
||||
|
|
|
@ -76,7 +76,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
|
|||
List<TDigestState> values = buckets.stream()
|
||||
.map(b -> resolveTDigestBucketValue(histo, b, bucketsPaths()[0]))
|
||||
.filter(v -> v != null)
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
int index = 0;
|
||||
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
|
||||
|
@ -126,7 +126,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
|
|||
List<DoubleHistogram> values = buckets.stream()
|
||||
.map(b -> resolveHDRBucketValue(histo, b, bucketsPaths()[0]))
|
||||
.filter(v -> v != null)
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
int index = 0;
|
||||
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
|
||||
|
|
|
@ -31,7 +31,6 @@ import java.util.Map;
|
|||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
|
||||
|
||||
|
@ -342,7 +341,7 @@ public class XPackInfoResponse extends ActionResponse implements ToXContentObjec
|
|||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
List<String> names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList());
|
||||
List<String> names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().toList();
|
||||
for (String name : names) {
|
||||
builder.field(name, featureSets.get(name), params);
|
||||
}
|
||||
|
|
|
@ -16,7 +16,6 @@ import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
|
|||
import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep {
|
||||
|
||||
|
@ -46,7 +45,7 @@ final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep {
|
|||
ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams();
|
||||
return shardFollowTask.getFollowShardId().getIndexName().equals(followerIndex);
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
if (shardFollowTasks.isEmpty()) {
|
||||
listener.onResponse(null);
|
||||
|
|
|
@ -156,7 +156,7 @@ public final class PhaseCacheManagement {
|
|||
.stream()
|
||||
.filter(meta -> newPolicy.getName().equals(meta.getLifecyclePolicyName()))
|
||||
.filter(meta -> isIndexPhaseDefinitionUpdatable(xContentRegistry, client, meta, newPolicy.getPolicy(), licenseState))
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
final List<String> refreshedIndices = new ArrayList<>(indicesThatCanBeUpdated.size());
|
||||
for (IndexMetadata index : indicesThatCanBeUpdated) {
|
||||
|
|
|
@ -79,7 +79,7 @@ public class SegmentCountStep extends AsyncWaitStep {
|
|||
.stream()
|
||||
.flatMap(iss -> Arrays.stream(iss.shards()))
|
||||
.filter(shardSegments -> shardSegments.getSegments().size() > maxNumSegments)
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
if (unmergedShards.size() > 0) {
|
||||
Map<ShardRouting, Integer> unmergedShardCounts = unmergedShards.stream()
|
||||
.collect(Collectors.toMap(ShardSegments::getShardRouting, ss -> ss.getSegments().size()));
|
||||
|
|
|
@ -63,7 +63,7 @@ final class WaitForFollowShardTasksStep extends AsyncWaitStep {
|
|||
.stream()
|
||||
.map(FollowStatsAction.StatsResponse::status)
|
||||
.filter(shardFollowStatus -> shardFollowStatus.leaderGlobalCheckpoint() != shardFollowStatus.followerGlobalCheckpoint())
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
|
||||
// Follow stats api needs to return stats for follower index and all shard follow tasks should be synced:
|
||||
boolean conditionMet = responses.getStatsResponses().size() > 0 && unSyncedShardFollowStatuses.isEmpty();
|
||||
|
|
|
@ -20,7 +20,6 @@ import java.util.Comparator;
|
|||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
public final class InferenceHelpers {
|
||||
|
@ -68,7 +67,7 @@ public final class InferenceHelpers {
|
|||
|
||||
List<String> labels = classificationLabels == null ?
|
||||
// If we don't have the labels we should return the top classification values anyways, they will just be numeric
|
||||
IntStream.range(0, probabilities.length).boxed().map(String::valueOf).collect(Collectors.toList()) : classificationLabels;
|
||||
IntStream.range(0, probabilities.length).mapToObj(String::valueOf).toList() : classificationLabels;
|
||||
|
||||
int count = numToInclude < 0 ? probabilities.length : Math.min(numToInclude, probabilities.length);
|
||||
List<TopClassEntry> topClassEntries = new ArrayList<>(count);
|
||||
|
|
|
@ -200,10 +200,9 @@ public class Detector implements ToXContentObject, Writeable {
|
|||
* ", \
|
||||
*/
|
||||
public static final Character[] PROHIBITED_FIELDNAME_CHARACTERS = { '"', '\\' };
|
||||
public static final String PROHIBITED = String.join(
|
||||
",",
|
||||
Arrays.stream(PROHIBITED_FIELDNAME_CHARACTERS).map(c -> Character.toString(c)).collect(Collectors.toList())
|
||||
);
|
||||
public static final String PROHIBITED = Arrays.stream(PROHIBITED_FIELDNAME_CHARACTERS)
|
||||
.map(c -> Character.toString(c))
|
||||
.collect(Collectors.joining(","));
|
||||
|
||||
private final String detectorDescription;
|
||||
private final DetectorFunction function;
|
||||
|
|
|
@ -18,7 +18,6 @@ import java.util.Set;
|
|||
import java.util.SortedSet;
|
||||
import java.util.TreeSet;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Expands an expression into the set of matching names.
|
||||
|
@ -92,7 +91,7 @@ public abstract class NameResolver {
|
|||
.filter(key -> Regex.simpleMatch(token, key))
|
||||
.map(this::lookup)
|
||||
.flatMap(List::stream)
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
if (expanded.isEmpty() && allowNoMatch == false) {
|
||||
throw notFoundExceptionSupplier.apply(token);
|
||||
}
|
||||
|
|
|
@ -14,11 +14,9 @@ import org.elasticsearch.xcontent.ToXContentObject;
|
|||
import org.elasticsearch.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Represents the rollup capabilities of a non-rollup index. E.g. what values/aggregations
|
||||
|
@ -35,9 +33,7 @@ public class RollableIndexCaps implements Writeable, ToXContentObject {
|
|||
|
||||
public RollableIndexCaps(String indexName, List<RollupJobCaps> caps) {
|
||||
this.indexName = indexName;
|
||||
this.jobCaps = Collections.unmodifiableList(
|
||||
Objects.requireNonNull(caps).stream().sorted(Comparator.comparing(RollupJobCaps::getJobID)).collect(Collectors.toList())
|
||||
);
|
||||
this.jobCaps = caps.stream().sorted(Comparator.comparing(RollupJobCaps::getJobID)).toList();
|
||||
}
|
||||
|
||||
public RollableIndexCaps(StreamInput in) throws IOException {
|
||||
|
|
|
@ -190,7 +190,7 @@ public class RollupJobCaps implements Writeable, ToXContentObject {
|
|||
final List<Map<String, Object>> metrics = metricConfig.getMetrics()
|
||||
.stream()
|
||||
.map(metric -> singletonMap("agg", (Object) metric))
|
||||
.collect(Collectors.toList());
|
||||
.toList();
|
||||
metrics.forEach(m -> {
|
||||
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(metricConfig.getField(), new ArrayList<>());
|
||||
caps.add(m);
|
||||
|
|
|
@ -27,7 +27,6 @@ import java.io.IOException;
|
|||
import java.util.Arrays;
|
||||
import java.util.Locale;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
|
||||
|
@ -71,7 +70,7 @@ public class MountSearchableSnapshotRequest extends MasterNodeRequest<MountSearc
|
|||
PARSER.declareField(optionalConstructorArg(), Settings::fromXContent, INDEX_SETTINGS_FIELD, ObjectParser.ValueType.OBJECT);
|
||||
PARSER.declareField(
|
||||
optionalConstructorArg(),
|
||||
p -> p.list().stream().map(s -> (String) s).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY),
|
||||
p -> p.list().stream().map(s -> (String) s).toArray(String[]::new),
|
||||
IGNORE_INDEX_SETTINGS_FIELD,
|
||||
ObjectParser.ValueType.STRING_ARRAY
|
||||
);
|
||||
|
|
|
@@ -24,7 +24,6 @@ import java.util.List;
import java.util.Objects;

import static java.util.Collections.unmodifiableList;
import static java.util.stream.Collectors.toList;

public class SearchableSnapshotShardStats implements Writeable, ToXContentObject {

@@ -97,7 +96,7 @@ public class SearchableSnapshotShardStats implements Writeable, ToXContentObject
{
List<CacheIndexInputStats> stats = inputStats.stream()
.sorted(Comparator.comparing(CacheIndexInputStats::getFileExt))
.collect(toList());
.toList();
for (CacheIndexInputStats stat : stats) {
stat.toXContent(builder, params);
}

@@ -111,9 +111,7 @@ public final class PutPrivilegesRequest extends ActionRequest implements Applica

@Override
public Collection<String> getApplicationNames() {
return Collections.unmodifiableSet(
privileges.stream().map(ApplicationPrivilegeDescriptor::getApplication).collect(Collectors.toSet())
);
return privileges.stream().map(ApplicationPrivilegeDescriptor::getApplication).collect(Collectors.toUnmodifiableSet());
}

@Override
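The `PutPrivilegesRequest` hunk collapses the "wrap in `Collections.unmodifiableSet`" step into the collector itself. A minimal sketch of that equivalence, with illustrative data:

```java
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Illustrative sketch: toUnmodifiableSet replaces collect-then-wrap.
public class UnmodifiableSetExample {
    public static void main(String[] args) {
        List<String> privileges = List.of("app-1", "app-2", "app-1");

        // Before: collect into a mutable set, then wrap it.
        Set<String> wrapped = Collections.unmodifiableSet(
            privileges.stream().collect(Collectors.toSet())
        );

        // After: the collector already produces an unmodifiable set.
        Set<String> direct = privileges.stream().collect(Collectors.toUnmodifiableSet());

        System.out.println(wrapped.equals(direct)); // true
    }
}
```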
@@ -1393,10 +1393,10 @@ public final class Authentication implements ToXContentObject {

final Map<String, Object> roleDescriptorsMap = convertRoleDescriptorsBytesToMap(roleDescriptorsBytes);
final AtomicBoolean removedAtLeastOne = new AtomicBoolean(false);
roleDescriptorsMap.entrySet().stream().forEach(entry -> {
if (entry.getValue() instanceof Map) {
roleDescriptorsMap.forEach((key, value) -> {
if (value instanceof Map) {
@SuppressWarnings("unchecked")
Map<String, Object> roleDescriptor = (Map<String, Object>) entry.getValue();
Map<String, Object> roleDescriptor = (Map<String, Object>) value;
boolean removed = roleDescriptor.remove(RoleDescriptor.Fields.REMOTE_INDICES.getPreferredName()) != null;
if (removed) {
removedAtLeastOne.set(true);

@@ -186,12 +186,12 @@ public class DefaultAuthenticationFailureHandler implements AuthenticationFailur
ese = authenticationError(message, t, args);
containsNegotiateWithToken = false;
}
defaultFailureResponseHeaders.entrySet().stream().forEach((e) -> {
if (containsNegotiateWithToken && e.getKey().equalsIgnoreCase("WWW-Authenticate")) {
defaultFailureResponseHeaders.forEach((key, value) -> {
if (containsNegotiateWithToken && key.equalsIgnoreCase("WWW-Authenticate")) {
return;
}
// If it is already present then it will replace the existing header.
ese.addHeader(e.getKey(), e.getValue());
ese.addHeader(key, value);
});
return ese;
}
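Both hunks above replace `map.entrySet().stream().forEach(...)` with `Map.forEach`, which avoids the redundant stream and gives the lambda the key and value directly. A small, self-contained sketch (header names are illustrative):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative sketch: iterate a map directly instead of streaming its entry set.
public class MapForEachExample {
    public static void main(String[] args) {
        Map<String, String> headers = new LinkedHashMap<>();
        headers.put("WWW-Authenticate", "Bearer");
        headers.put("X-Custom", "value");

        // Before: headers.entrySet().stream().forEach(e -> use(e.getKey(), e.getValue()));
        // After: no stream, and the lambda receives key and value directly.
        headers.forEach((key, value) -> System.out.println(key + ": " + value));
    }
}
```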
@@ -632,8 +632,7 @@ public interface AuthorizationEngine {
|| Arrays.equals(IndicesAndAliasesResolverField.NO_INDICES_OR_ALIASES_ARRAY, indices)) {
return null;
}
Set<String> deniedIndices = Arrays.asList(indices)
.stream()
Set<String> deniedIndices = Arrays.stream(indices)
.filter(index -> false == indicesAccessControl.hasIndexPermissions(index))
.collect(Collectors.toSet());
return getFailureDescription(deniedIndices, restrictedIndices);

@@ -74,9 +74,7 @@ public final class ResourcePrivilegesMap {

public Builder addResourcePrivilegesMap(ResourcePrivilegesMap resourcePrivilegesMap) {
resourcePrivilegesMap.getResourceToResourcePrivileges()
.entrySet()
.stream()
.forEach(e -> this.addResourcePrivilege(e.getKey(), e.getValue().getPrivileges()));
.forEach((key, value) -> this.addResourcePrivilege(key, value.getPrivileges()));
return this;
}

@@ -230,7 +230,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject {
List<SnapshotPolicyStats> metrics = getMetrics().values()
.stream()
.sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing
.collect(Collectors.toList());
.toList();
long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum();
long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum();
long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum();

@@ -31,7 +31,6 @@ import java.util.Objects;
import java.util.Set;
import java.util.function.LongSupplier;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static org.elasticsearch.core.Strings.format;

@@ -132,9 +131,7 @@ public class SnapshotRetentionConfiguration implements ToXContentObject, Writeab
*/
public Predicate<SnapshotInfo> getSnapshotDeletionPredicate(final List<SnapshotInfo> allSnapshots) {
final int totalSnapshotCount = allSnapshots.size();
final List<SnapshotInfo> sortedSnapshots = allSnapshots.stream()
.sorted(Comparator.comparingLong(SnapshotInfo::startTime))
.collect(Collectors.toList());
final List<SnapshotInfo> sortedSnapshots = allSnapshots.stream().sorted(Comparator.comparingLong(SnapshotInfo::startTime)).toList();
int successCount = 0;
long latestSuccessfulTimestamp = Long.MIN_VALUE;
for (SnapshotInfo snapshot : allSnapshots) {

@@ -407,9 +407,10 @@ public class SSLService {
}

public Set<String> getTransportProfileContextNames() {
return Collections.unmodifiableSet(
this.sslConfigurations.keySet().stream().filter(k -> k.startsWith("transport.profiles.")).collect(Collectors.toSet())
);
return this.sslConfigurations.keySet()
.stream()
.filter(k -> k.startsWith("transport.profiles."))
.collect(Collectors.toUnmodifiableSet());
}

/**
@@ -208,8 +208,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
AllocationService allocationService,
IndicesService indicesService
) {
List<Object> components = new ArrayList<>();
components.addAll(
List<Object> components = new ArrayList<>(
super.createComponents(
client,
clusterService,

@@ -228,8 +227,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
)
);

filterPlugins(Plugin.class).stream()
.forEach(
filterPlugins(Plugin.class).forEach(
p -> components.addAll(
p.createComponents(
client,

@@ -254,42 +252,36 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin

@Override
public Collection<RestHeaderDefinition> getRestHeaders() {
List<RestHeaderDefinition> headers = new ArrayList<>();
headers.addAll(super.getRestHeaders());
filterPlugins(ActionPlugin.class).stream().forEach(p -> headers.addAll(p.getRestHeaders()));
List<RestHeaderDefinition> headers = new ArrayList<>(super.getRestHeaders());
filterPlugins(ActionPlugin.class).forEach(p -> headers.addAll(p.getRestHeaders()));
return headers;
}

@Override
public List<Setting<?>> getSettings() {
ArrayList<Setting<?>> settings = new ArrayList<>();
settings.addAll(super.getSettings());

filterPlugins(Plugin.class).stream().forEach(p -> settings.addAll(p.getSettings()));
ArrayList<Setting<?>> settings = new ArrayList<>(super.getSettings());
filterPlugins(Plugin.class).forEach(p -> settings.addAll(p.getSettings()));
return settings;
}

@Override
public List<String> getSettingsFilter() {
List<String> filters = new ArrayList<>();
filters.addAll(super.getSettingsFilter());
filterPlugins(Plugin.class).stream().forEach(p -> filters.addAll(p.getSettingsFilter()));
List<String> filters = new ArrayList<>(super.getSettingsFilter());
filterPlugins(Plugin.class).forEach(p -> filters.addAll(p.getSettingsFilter()));
return filters;
}

@Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>();
actions.addAll(super.getActions());
filterPlugins(ActionPlugin.class).stream().forEach(p -> actions.addAll(p.getActions()));
List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>(super.getActions());
filterPlugins(ActionPlugin.class).forEach(p -> actions.addAll(p.getActions()));
return actions;
}

@Override
public List<ActionFilter> getActionFilters() {
List<ActionFilter> filters = new ArrayList<>();
filters.addAll(super.getActionFilters());
filterPlugins(ActionPlugin.class).stream().forEach(p -> filters.addAll(p.getActionFilters()));
List<ActionFilter> filters = new ArrayList<>(super.getActionFilters());
filterPlugins(ActionPlugin.class).forEach(p -> filters.addAll(p.getActionFilters()));
return filters;
}

@@ -303,8 +295,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster
) {
List<RestHandler> handlers = new ArrayList<>();
handlers.addAll(
List<RestHandler> handlers = new ArrayList<>(
super.getRestHandlers(
settings,
restController,

@@ -315,8 +306,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
nodesInCluster
)
);
filterPlugins(ActionPlugin.class).stream()
.forEach(
filterPlugins(ActionPlugin.class).forEach(
p -> handlers.addAll(
p.getRestHandlers(
settings,

@@ -334,8 +324,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin

@Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(super.getNamedWriteables());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(super.getNamedWriteables());
for (Plugin p : plugins) {
entries.addAll(p.getNamedWriteables());
}

@@ -344,8 +333,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin

@Override
public List<NamedXContentRegistry.Entry> getNamedXContent() {
List<NamedXContentRegistry.Entry> entries = new ArrayList<>();
entries.addAll(super.getNamedXContent());
List<NamedXContentRegistry.Entry> entries = new ArrayList<>(super.getNamedXContent());
for (Plugin p : plugins) {
entries.addAll(p.getNamedXContent());
}

@@ -358,29 +346,30 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
public Settings additionalSettings() {
Settings.Builder builder = Settings.builder();
builder.put(super.additionalSettings());
filterPlugins(Plugin.class).stream().forEach(p -> builder.put(p.additionalSettings()));
filterPlugins(Plugin.class).forEach(p -> builder.put(p.additionalSettings()));
return builder.build();
}

@Override
public List<ScriptContext<?>> getContexts() {
List<ScriptContext<?>> contexts = new ArrayList<>();
filterPlugins(ScriptPlugin.class).stream().forEach(p -> contexts.addAll(p.getContexts()));
filterPlugins(ScriptPlugin.class).forEach(p -> contexts.addAll(p.getContexts()));
return contexts;
}

@Override
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
Map<String, Processor.Factory> processors = new HashMap<>();
filterPlugins(IngestPlugin.class).stream().forEach(p -> processors.putAll(p.getProcessors(parameters)));
filterPlugins(IngestPlugin.class).forEach(p -> processors.putAll(p.getProcessors(parameters)));
return processors;
}

@Override
public List<TransportInterceptor> getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) {
List<TransportInterceptor> interceptors = new ArrayList<>();
filterPlugins(NetworkPlugin.class).stream()
.forEach(p -> interceptors.addAll(p.getTransportInterceptors(namedWriteableRegistry, threadContext)));
filterPlugins(NetworkPlugin.class).forEach(
p -> interceptors.addAll(p.getTransportInterceptors(namedWriteableRegistry, threadContext))
);
return interceptors;
}

@@ -393,12 +382,10 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
NamedWriteableRegistry namedWriteableRegistry,
NetworkService networkService
) {
Map<String, Supplier<Transport>> transports = new HashMap<>();
transports.putAll(
Map<String, Supplier<Transport>> transports = new HashMap<>(
super.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService)
);
filterPlugins(NetworkPlugin.class).stream()
.forEach(
filterPlugins(NetworkPlugin.class).forEach(
p -> transports.putAll(
p.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService)
)

@@ -422,8 +409,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
Tracer tracer
) {
Map<String, Supplier<HttpServerTransport>> transports = new HashMap<>();
filterPlugins(NetworkPlugin.class).stream()
.forEach(
filterPlugins(NetworkPlugin.class).forEach(
p -> transports.putAll(
p.getHttpTransports(
settings,

@@ -446,7 +432,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override
public List<BootstrapCheck> getBootstrapChecks() {
List<BootstrapCheck> checks = new ArrayList<>();
filterPlugins(Plugin.class).stream().forEach(p -> checks.addAll(p.getBootstrapChecks()));
filterPlugins(Plugin.class).forEach(p -> checks.addAll(p.getBootstrapChecks()));
return Collections.unmodifiableList(checks);
}

@@ -459,7 +445,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
.map(RestInterceptorActionPlugin.class::cast)
.map(p -> p.getRestHandlerInterceptor(threadContext))
.filter(Objects::nonNull)
.collect(Collectors.toList());
.toList();

if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security ActionPlugin should override this");
@@ -473,7 +459,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override
public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) {
List<ExecutorBuilder<?>> builders = new ArrayList<>();
filterPlugins(Plugin.class).stream().forEach(p -> builders.addAll(p.getExecutorBuilders(settings)));
filterPlugins(Plugin.class).forEach(p -> builders.addAll(p.getExecutorBuilders(settings)));
return builders;
}

@@ -490,14 +476,14 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override
public Map<String, ElectionStrategy> getElectionStrategies() {
Map<String, ElectionStrategy> electionStrategies = new HashMap<>();
filterPlugins(ClusterCoordinationPlugin.class).stream().forEach(p -> electionStrategies.putAll(p.getElectionStrategies()));
filterPlugins(ClusterCoordinationPlugin.class).forEach(p -> electionStrategies.putAll(p.getElectionStrategies()));
return electionStrategies;
}

@Override
public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) {
Set<IndexSettingProvider> providers = new HashSet<>();
filterPlugins(Plugin.class).stream().forEach(p -> providers.addAll(p.getAdditionalIndexSettingProviders(parameters)));
filterPlugins(Plugin.class).forEach(p -> providers.addAll(p.getAdditionalIndexSettingProviders(parameters)));
providers.addAll(super.getAdditionalIndexSettingProviders(parameters));
return providers;

@@ -506,21 +492,19 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override
public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers = new HashMap<>();
filterPlugins(AnalysisPlugin.class).stream().forEach(p -> tokenizers.putAll(p.getTokenizers()));
filterPlugins(AnalysisPlugin.class).forEach(p -> tokenizers.putAll(p.getTokenizers()));
return tokenizers;
}

@Override
public void onIndexModule(IndexModule indexModule) {
super.onIndexModule(indexModule);
filterPlugins(Plugin.class).stream().forEach(p -> p.onIndexModule(indexModule));
filterPlugins(Plugin.class).forEach(p -> p.onIndexModule(indexModule));
}

@Override
public Function<String, Predicate<String>> getFieldFilter() {
List<Function<String, Predicate<String>>> items = filterPlugins(MapperPlugin.class).stream()
.map(p -> p.getFieldFilter())
.collect(Collectors.toList());
List<Function<String, Predicate<String>>> items = filterPlugins(MapperPlugin.class).stream().map(p -> p.getFieldFilter()).toList();
if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security MapperPlugin should override this");
} else if (items.size() == 1) {

@@ -536,7 +520,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
// There can be only one.
List<BiConsumer<DiscoveryNode, ClusterState>> items = filterPlugins(ClusterCoordinationPlugin.class).stream()
.map(p -> p.getJoinValidator())
.collect(Collectors.toList());
.toList();
if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security DiscoveryPlugin should override this");
} else if (items.size() == 1) {

@@ -598,7 +582,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
List<BiConsumer<Snapshot, IndexVersion>> checks = filterPlugins(RepositoryPlugin.class).stream()
.map(RepositoryPlugin::addPreRestoreVersionCheck)
.filter(Objects::nonNull)
.collect(Collectors.toList());
.toList();
return checks.isEmpty() ? null : (s, v) -> checks.forEach(c -> c.accept(s, v));
}

@@ -611,7 +595,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
public Optional<EngineFactory> getEngineFactory(IndexSettings indexSettings) {
List<Optional<EngineFactory>> enginePlugins = filterPlugins(EnginePlugin.class).stream()
.map(p -> p.getEngineFactory(indexSettings))
.collect(Collectors.toList());
.toList();
if (enginePlugins.size() == 0) {
return Optional.empty();
} else if (enginePlugins.size() == 1) {

@@ -639,7 +623,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
deciders.addAll(
filterPlugins(ClusterPlugin.class).stream()
.flatMap(p -> p.createAllocationDeciders(settings, clusterSettings).stream())
.collect(Collectors.toList())
.toList()
);
deciders.addAll(super.createAllocationDeciders(settings, clusterSettings));
return deciders;

@@ -648,21 +632,21 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override
public Map<String, ExistingShardsAllocator> getExistingShardsAllocators() {
final Map<String, ExistingShardsAllocator> allocators = new HashMap<>();
filterPlugins(ClusterPlugin.class).stream().forEach(p -> allocators.putAll(p.getExistingShardsAllocators()));
filterPlugins(ClusterPlugin.class).forEach(p -> allocators.putAll(p.getExistingShardsAllocators()));
return allocators;
}

@Override
public Map<String, IndexStorePlugin.DirectoryFactory> getDirectoryFactories() {
final Map<String, IndexStorePlugin.DirectoryFactory> factories = new HashMap<>();
filterPlugins(IndexStorePlugin.class).stream().forEach(p -> factories.putAll(p.getDirectoryFactories()));
filterPlugins(IndexStorePlugin.class).forEach(p -> factories.putAll(p.getDirectoryFactories()));
return factories;
}

@Override
public Map<String, RecoveryStateFactory> getRecoveryStateFactories() {
final Map<String, RecoveryStateFactory> factories = new HashMap<>();
filterPlugins(IndexStorePlugin.class).stream().forEach(p -> factories.putAll(p.getRecoveryStateFactories()));
filterPlugins(IndexStorePlugin.class).forEach(p -> factories.putAll(p.getRecoveryStateFactories()));
return factories;
}
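The `LocalStateCompositeXPackPlugin` hunks above repeat two cleanups from the commit message: seeding a collection through its copy constructor instead of `addAll`, and calling `forEach` on the collection itself instead of going through `stream().forEach`. A minimal sketch of both, with illustrative names only:

```java
import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of copy-constructor seeding plus forEach on the collection.
public class CollectionSeedingExample {
    public static void main(String[] args) {
        List<String> superSettings = List.of("xpack.enabled");
        List<List<String>> plugins = List.of(List.of("a"), List.of("b"));

        // Before:
        // List<String> settings = new ArrayList<>();
        // settings.addAll(superSettings);
        // plugins.stream().forEach(p -> settings.addAll(p));

        // After:
        List<String> settings = new ArrayList<>(superSettings);
        plugins.forEach(settings::addAll);

        System.out.println(settings); // [xpack.enabled, a, b]
    }
}
```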
@@ -134,10 +134,10 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>();
for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) {
DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings(
similarIssues.stream().map(Tuple::v1).collect(Collectors.toList())
similarIssues.stream().map(Tuple::v1).toList()
);
issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>())
.addAll(similarIssues.stream().map(Tuple::v2).collect(Collectors.toList()));
.addAll(similarIssues.stream().map(Tuple::v2).toList());
}
return issueToListOfNodesMap;
}

@@ -23,7 +23,6 @@ import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
* Index-specific deprecation checks

@@ -212,7 +211,7 @@ public class IndexDeprecationChecks {
);

if (fields.size() > 0) {
String detailsMessageBeginning = fields.stream().collect(Collectors.joining(" "));
String detailsMessageBeginning = String.join(" ", fields);
return new DeprecationIssue(
DeprecationIssue.Level.CRITICAL,
"Date fields use deprecated camel case formats",
@@ -246,21 +246,19 @@ public class NodeDeprecationChecks {
Settings clusterSettings,
Settings nodeSettings
) {
List<Setting<?>> deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings)
var deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings)
.sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList());
List<Setting<?>> deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings)
.toList();
var deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings)
.sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList());
.toList();

if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) {
return null;
}

List<String> deprecatedNodeSettingKeys = deprecatedConcreteNodeSettings.stream().map(Setting::getKey).collect(Collectors.toList());
List<String> deprecatedClusterSettingKeys = deprecatedConcreteClusterSettings.stream()
.map(Setting::getKey)
.collect(Collectors.toList());
List<String> deprecatedNodeSettingKeys = deprecatedConcreteNodeSettings.stream().map(Setting::getKey).toList();
List<String> deprecatedClusterSettingKeys = deprecatedConcreteClusterSettings.stream().map(Setting::getKey).toList();

final String concatSettingNames = Stream.concat(deprecatedNodeSettingKeys.stream(), deprecatedClusterSettingKeys.stream())
.distinct()

@@ -289,10 +287,10 @@ public class NodeDeprecationChecks {
) {
List<Setting<Settings>> deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings)
.sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList());
.toList();
List<Setting<Settings>> deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings)
.sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList());
.toList();

if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) {
return null;

@@ -305,19 +303,19 @@ public class NodeDeprecationChecks {
.map(key -> key + "*")
.collect(Collectors.joining(","));
// The actual group setting that are present in the settings objects, with full setting name prepended.
List<String> allNodeSubSettingKeys = deprecatedConcreteNodeSettings.stream().map(affixSetting -> {
List<String> allNodeSubSettingKeys = deprecatedConcreteNodeSettings.stream().flatMap(affixSetting -> {
String groupPrefix = affixSetting.getKey();
Settings groupSettings = affixSetting.get(nodeSettings);
Set<String> subSettings = groupSettings.keySet();
return subSettings.stream().map(key -> groupPrefix + key).collect(Collectors.toList());
}).flatMap(List::stream).sorted().collect(Collectors.toList());
return subSettings.stream().map(key -> groupPrefix + key);
}).sorted().toList();

List<String> allClusterSubSettingKeys = deprecatedConcreteClusterSettings.stream().map(affixSetting -> {
List<String> allClusterSubSettingKeys = deprecatedConcreteClusterSettings.stream().flatMap(affixSetting -> {
String groupPrefix = affixSetting.getKey();
Settings groupSettings = affixSetting.get(clusterSettings);
Set<String> subSettings = groupSettings.keySet();
return subSettings.stream().map(key -> groupPrefix + key).collect(Collectors.toList());
}).flatMap(List::stream).sorted().collect(Collectors.toList());
return subSettings.stream().map(key -> groupPrefix + key);
}).sorted().toList();

final String allSubSettings = Stream.concat(allNodeSubSettingKeys.stream(), allClusterSubSettingKeys.stream())
.distinct()
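The `NodeDeprecationChecks` hunk above swaps a map-to-list followed by `flatMap(List::stream)` for a mapping function that returns a `Stream` directly, so there is only one flattening step and no intermediate lists. A hedged, standalone sketch of that shape (setting names are invented for illustration):

```java
import java.util.List;

// Illustrative sketch: return a Stream from the mapper and flatMap once.
public class FlatMapExample {
    public static void main(String[] args) {
        List<String> prefixes = List.of("a.", "b.");
        List<String> suffixes = List.of("x", "y");

        // Before: prefixes.stream().map(p -> build a List per prefix).flatMap(List::stream)...
        // After:
        List<String> keys = prefixes.stream()
            .flatMap(p -> suffixes.stream().map(s -> p + s))
            .sorted()
            .toList();

        System.out.println(keys); // [a.x, a.y, b.x, b.y]
    }
}
```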
@@ -151,9 +151,7 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
DeprecationChecker.Components components,
ActionListener<Map<String, List<DeprecationIssue>>> listener
) {
List<DeprecationChecker> enabledCheckers = checkers.stream()
.filter(c -> c.enabled(components.settings()))
.collect(Collectors.toList());
List<DeprecationChecker> enabledCheckers = checkers.stream().filter(c -> c.enabled(components.settings())).toList();
if (enabledCheckers.isEmpty()) {
listener.onResponse(Collections.emptyMap());
return;

@@ -441,7 +441,7 @@ public class EqlSearchIT extends ESRestTestCase {
}

List<Object> actualList = new ArrayList<>();
events.stream().forEach(m -> actualList.add(m.get("_id")));
events.forEach(m -> actualList.add(m.get("_id")));

if (false == expected.equals(actualList)) {
NotEqualMessageBuilder message = new NotEqualMessageBuilder();
@@ -21,8 +21,6 @@ import java.util.LinkedHashSet;
import java.util.Objects;
import java.util.Set;

import static java.util.stream.Collectors.toList;

public final class AnalysisUtils {

private AnalysisUtils() {}

@@ -67,7 +65,7 @@ public final class AnalysisUtils {
"Reference ["
+ u.qualifiedName()
+ "] is ambiguous (to disambiguate use quotes or qualifiers); matches any of "
+ matches.stream().map(a -> "\"" + a.qualifier() + "\".\"" + a.name() + "\"").sorted().collect(toList())
+ matches.stream().map(a -> "\"" + a.qualifier() + "\".\"" + a.name() + "\"").sorted().toList()
);
}

@@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings;

@@ -66,7 +65,6 @@ import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.ClientHelper.FLEET_ORIGIN;

@@ -306,10 +304,7 @@ public class Fleet extends Plugin implements SystemIndexPlugin {
if (dataStreamDescriptors.isEmpty() == false) {
try {
Request request = new Request(
dataStreamDescriptors.stream()
.map(SystemDataStreamDescriptor::getDataStreamName)
.collect(Collectors.toList())
.toArray(Strings.EMPTY_ARRAY)
dataStreamDescriptors.stream().map(SystemDataStreamDescriptor::getDataStreamName).toArray(String[]::new)
);
EnumSet<Option> options = request.indicesOptions().options();
options.add(Option.IGNORE_UNAVAILABLE);

@@ -85,7 +85,7 @@ public class TransportGetLifecycleAction extends TransportMasterNodeAction<Reque
names = Arrays.asList(request.getPolicyNames());
}

if (names.size() > 1 && names.stream().filter(Regex::isSimpleMatchPattern).count() > 0) {
if (names.size() > 1 && names.stream().anyMatch(Regex::isSimpleMatchPattern)) {
throw new IllegalArgumentException(
"wildcard only supports a single value, please use comma-separated values or a single wildcard value"
);
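The `TransportGetLifecycleAction` hunk above replaces `filter(...).count() > 0` with `anyMatch(...)`, which expresses the intent directly and short-circuits on the first match instead of walking the whole stream. A small, self-contained sketch (policy names are invented for illustration):

```java
import java.util.List;

// Illustrative sketch: anyMatch short-circuits; filter(...).count() > 0 does not.
public class AnyMatchExample {
    public static void main(String[] args) {
        List<String> names = List.of("policy-1", "policy-*", "policy-2");

        // Before: names.stream().filter(n -> n.contains("*")).count() > 0
        // After:
        boolean hasWildcard = names.stream().anyMatch(n -> n.contains("*"));

        System.out.println(hasWildcard); // true
    }
}
```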
@@ -120,8 +120,8 @@ public class TransportGetPipelineAction extends HandledTransportAction<GetPipeli
new GetPipelineResponse(
Arrays.stream(mGetResponse.getResponses())
.filter(itemResponse -> itemResponse.isFailed() == false)
.filter(itemResponse -> itemResponse.getResponse().isExists())
.map(MultiGetItemResponse::getResponse)
.filter(GetResponse::isExists)
.collect(Collectors.toMap(GetResponse::getId, GetResponse::getSourceAsBytesRef))
)
);

@@ -32,7 +32,6 @@ import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex;

import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;

@@ -230,7 +229,7 @@ public class MlInitializationService implements ClusterStateListener {
updateSettingsListener.onResponse(AcknowledgedResponse.TRUE);
return;
}
String nonHiddenIndicesString = Arrays.stream(nonHiddenIndices).collect(Collectors.joining(", "));
String nonHiddenIndicesString = String.join(", ", nonHiddenIndices);
logger.debug("The following ML internal indices will now be made hidden: [{}]", nonHiddenIndicesString);
UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest().indices(nonHiddenIndices)
.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)

@@ -260,7 +260,7 @@ public class TransportGetTrainedModelsStatsAction extends HandledTransportAction
matchedDeployments.add(assignment.getDeploymentId());
}
}
String deployments = matchedDeployments.stream().collect(Collectors.joining(","));
String deployments = String.join(",", matchedDeployments);

logger.debug("Fetching stats for deployments [{}]", deployments);

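The two ML hunks above are instances of the "redundant streams for joining strings" cleanup from the commit message: when no per-element transformation is needed, `String.join` replaces `stream().collect(Collectors.joining(...))`. A minimal sketch with illustrative values:

```java
import java.util.List;

// Illustrative sketch: String.join instead of streaming just to join.
public class JoinExample {
    public static void main(String[] args) {
        List<String> deployments = List.of("model-a", "model-b");

        // Before: deployments.stream().collect(Collectors.joining(","));
        // After:
        String joined = String.join(",", deployments);

        System.out.println(joined); // model-a,model-b
    }
}
```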
@@ -53,7 +53,6 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static org.elasticsearch.core.Strings.format;
import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;

@@ -187,7 +186,7 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
referencedRuleFiltersPresentListener.onResponse(true);
} else {
GetFiltersAction.Request getFiltersRequest = new GetFiltersAction.Request();
getFiltersRequest.setResourceId(referencedRuleFilters.stream().collect(Collectors.joining(",")));
getFiltersRequest.setResourceId(String.join(",", referencedRuleFilters));
getFiltersRequest.setAllowNoResources(false);
client.execute(
GetFiltersAction.INSTANCE,

@@ -291,7 +291,7 @@ public class TransportSetUpgradeModeAction extends AcknowledgedTransportMasterNo
// We want to always have the same ordering of which tasks we un-allocate first.
// However, the order in which the distributed tasks handle the un-allocation event is not guaranteed.
.sorted(Comparator.comparing(PersistentTask::getTaskName))
.collect(Collectors.toList());
.toList();

logger.info(
"Un-assigning persistent tasks : " + mlTasks.stream().map(PersistentTask::getId).collect(Collectors.joining(", ", "[ ", " ]"))

@@ -47,7 +47,6 @@ import java.util.Optional;
import java.util.PriorityQueue;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static java.time.Instant.ofEpochMilli;

@@ -454,12 +453,7 @@ class MlMemoryAutoscalingDecider {
if (unassignedJobs.isEmpty()) {
return Optional.empty();
}
List<Long> jobSizes = unassignedJobs.stream()
.map(sizeFunction)
.map(l -> l == null ? 0L : l)
.sorted(Comparator.comparingLong(Long::longValue).reversed())
.collect(Collectors.toList());

List<Long> jobSizes = computeJobSizes(unassignedJobs, sizeFunction);
long tierMemory = 0L;
// Node memory needs to be AT LEAST the size of the largest job + the required overhead.
long nodeMemory = jobSizes.get(0);

@@ -720,11 +714,7 @@ class MlMemoryAutoscalingDecider {
for (NodeLoad load : nodeLoads) {
mostFreeMemoryFirst.add(NodeLoad.builder(load));
}
List<Long> jobSizes = unassignedJobs.stream()
.map(sizeFunction)
.map(l -> l == null ? 0L : l)
.sorted(Comparator.comparingLong(Long::longValue).reversed())
.collect(Collectors.toList());
List<Long> jobSizes = computeJobSizes(unassignedJobs, sizeFunction);

Iterator<Long> assignmentIter = jobSizes.iterator();
while (jobSizes.size() > maxNumInQueue && assignmentIter.hasNext()) {

@@ -945,4 +935,13 @@ class MlMemoryAutoscalingDecider {
private Long getAnomalyMemoryRequirement(PersistentTasksCustomMetadata.PersistentTask<?> task) {
return getAnomalyMemoryRequirement(MlTasks.jobId(task.getId()));
}

private static List<Long> computeJobSizes(List<String> unassignedJobs, Function<String, Long> sizeFunction) {
List<Long> jobSizes = new ArrayList<>(unassignedJobs.size());
for (String unassignedJob : unassignedJobs) {
jobSizes.add(Objects.requireNonNullElse(sizeFunction.apply(unassignedJob), 0L));
}
jobSizes.sort(Comparator.comparingLong(Long::longValue).reversed());
return jobSizes;
}
}
@@ -25,7 +25,6 @@ import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;

import static java.util.stream.Collectors.toList;
import static org.elasticsearch.core.Strings.format;

@@ -83,7 +82,7 @@ public class DatafeedConfigAutoUpdater implements MlAutoUpdateService.UpdateActi
.setId(datafeedConfig.getId())
.build()
)
.collect(Collectors.toList());
.toList();
if (updates.isEmpty()) {
return;
}

@@ -29,7 +29,6 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;

/**
* Abstract class for aggregated data extractors, e.g. {@link RollupDataExtractor}

@@ -168,8 +167,7 @@ abstract class AbstractAggregationDataExtractor<T extends ActionRequestBuilder<S
}
if (aggsAsList.size() > 1) {
throw new IllegalArgumentException(
"Multiple top level aggregations not supported; found: "
+ aggsAsList.stream().map(Aggregation::getName).collect(Collectors.toList())
"Multiple top level aggregations not supported; found: " + aggsAsList.stream().map(Aggregation::getName).toList()
);
}

@@ -146,7 +146,7 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {

List<ParsedRollupCaps> validIntervalCaps = rollupCapsSet.stream()
.filter(rollupCaps -> validInterval(datafeedInterval, rollupCaps))
.collect(Collectors.toList());
.toList();

if (validIntervalCaps.isEmpty()) {
listener.onFailure(

@@ -67,7 +67,7 @@ public class TimeBasedExtractedFields extends ExtractedFields {
throw new IllegalArgumentException("cannot retrieve time field [" + timeField + "] because it is not aggregatable");
}
ExtractedField timeExtractedField = extractedTimeField(timeField, scriptFields);
List<String> remainingFields = job.allInputFields().stream().filter(f -> f.equals(timeField) == false).collect(Collectors.toList());
List<String> remainingFields = job.allInputFields().stream().filter(f -> f.equals(timeField) == false).toList();
List<ExtractedField> allExtractedFields = new ArrayList<>(remainingFields.size() + 1);
allExtractedFields.add(timeExtractedField);
remainingFields.forEach(field -> allExtractedFields.add(extractionMethodDetector.detect(field)));
@@ -65,7 +65,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;

@@ -346,7 +345,7 @@ public class DataFrameAnalyticsConfigProvider {
}

Set<String> tasksWithoutConfigs = new HashSet<>(jobsWithTask);
tasksWithoutConfigs.removeAll(configs.stream().map(DataFrameAnalyticsConfig::getId).collect(Collectors.toList()));
configs.stream().map(DataFrameAnalyticsConfig::getId).toList().forEach(tasksWithoutConfigs::remove);
if (tasksWithoutConfigs.isEmpty() == false) {
logger.warn("Data frame analytics tasks {} have no configs", tasksWithoutConfigs);
}

@@ -16,6 +16,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -215,7 +216,7 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
}

StringBuilder msg = new StringBuilder();
List<Node> nodes = nodeToModel.keySet().stream().sorted(Comparator.comparing(Node::id)).collect(Collectors.toList());
List<Node> nodes = nodeToModel.keySet().stream().sorted(Comparator.comparing(Node::id)).toList();
for (int i = 0; i < nodes.size(); i++) {
Node n = nodes.get(i);
msg.append(n);

@@ -223,7 +224,7 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
for (Tuple<Deployment, Integer> modelAllocations : nodeToModel.get(n)
.stream()
.sorted(Comparator.comparing(x -> x.v1().id()))
.collect(Collectors.toList())) {
.toList()) {
if (modelAllocations.v2() > 0) {
msg.append(" ");
msg.append(modelAllocations.v1().id());

@@ -259,10 +260,10 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
private final Map<Deployment, Integer> remainingModelAllocations;

private Builder(Collection<Node> nodes, Collection<Deployment> deployments) {
if (nodes.stream().collect(Collectors.toSet()).size() != nodes.size()) {
if (new HashSet<>(nodes).size() != nodes.size()) {
throw new IllegalArgumentException("there should be no duplicate nodes");
}
if (deployments.stream().collect(Collectors.toSet()).size() != deployments.size()) {
if (new HashSet<>(deployments).size() != deployments.size()) {
throw new IllegalArgumentException("there should be no duplicate models");
}
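The `AssignmentPlan.Builder` hunk above shows that a duplicate check does not need a stream at all; a `HashSet` copy constructor does the same deduplication. A hedged, standalone sketch with invented node ids:

```java
import java.util.HashSet;
import java.util.List;

// Illustrative sketch: HashSet copy constructor instead of stream().collect(toSet()).
public class DuplicateCheckExample {
    public static void main(String[] args) {
        List<String> nodes = List.of("node-1", "node-2", "node-1");

        // Before: nodes.stream().collect(Collectors.toSet()).size() != nodes.size()
        // After:
        boolean hasDuplicates = new HashSet<>(nodes).size() != nodes.size();

        System.out.println(hasDuplicates); // true
    }
}
```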
@@ -46,9 +46,7 @@ public class ZoneAwareAssignmentPlanner {
}

private static Map<List<String>, List<Node>> sortByZone(Map<List<String>, List<Node>> nodesByZone) {
Map<List<String>, List<Node>> sortedByZone = new TreeMap<>(
Comparator.comparing(zoneAttributes -> zoneAttributes.stream().collect(Collectors.joining()))
);
Map<List<String>, List<Node>> sortedByZone = new TreeMap<>(Comparator.comparing(zoneAttributes -> String.join("", zoneAttributes)));
sortedByZone.putAll(nodesByZone);
return sortedByZone;
}

@@ -52,7 +52,7 @@ public class OverallBucketsAggregator implements OverallBucketsProcessor {

private OverallBucket outputBucket() {
List<OverallBucket.JobInfo> jobs = new ArrayList<>(maxScoreByJob.size());
maxScoreByJob.entrySet().stream().forEach(entry -> jobs.add(new OverallBucket.JobInfo(entry.getKey(), entry.getValue())));
maxScoreByJob.forEach((key, value) -> jobs.add(new OverallBucket.JobInfo(key, value)));
return new OverallBucket(new Date(startTime), bucketSpanSeconds, maxOverallScore, jobs, isInterim);
}

@@ -65,7 +65,7 @@ public class OverallBucketsAggregator implements OverallBucketsProcessor {

private void processBucket(OverallBucket bucket) {
maxOverallScore = Math.max(maxOverallScore, bucket.getOverallScore());
bucket.getJobs().stream().forEach(j -> {
bucket.getJobs().forEach(j -> {
double currentMax = maxScoreByJob.computeIfAbsent(j.getJobId(), k -> 0.0);
if (j.getMaxAnomalyScore() > currentMax) {
maxScoreByJob.put(j.getJobId(), j.getMaxAnomalyScore());
@@ -14,7 +14,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable.ChildType.BUCKET_INFLUENCER;

@@ -109,13 +108,10 @@ public class BucketNormalizable extends Normalizable {

@Override
public List<Normalizable> getChildren(ChildType type) {
List<Normalizable> children = new ArrayList<>();
List<Normalizable> children;
switch (type) {
case BUCKET_INFLUENCER -> children.addAll(
bucket.getBucketInfluencers()
.stream()
.map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex()))
.collect(Collectors.toList())
case BUCKET_INFLUENCER -> children = new ArrayList<>(
bucket.getBucketInfluencers().stream().map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex())).toList()
);
default -> throw new IllegalArgumentException("Invalid type: " + type);
}

@@ -108,11 +108,7 @@ public class InternalItemSetMapReduceAggregationTests extends InternalAggregatio

@Override
public WordCounts map(Stream<Tuple<Field, List<Object>>> keyValues, WordCounts wordCounts) {

keyValues.forEach(v -> {
v.v2().stream().forEach(word -> { wordCounts.frequencies.compute((String) word, (k, c) -> (c == null) ? 1 : c + 1); });
});

keyValues.forEach(v -> v.v2().forEach(word -> wordCounts.frequencies.merge((String) word, 1L, Long::sum)));
return wordCounts;
}

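The word-counting hunk above also swaps a null-checking `Map.compute` call for `Map.merge`, which handles the "absent key" case for free. A minimal sketch of that idiom with illustrative data:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative sketch: Map.merge for counting occurrences.
public class MergeCountExample {
    public static void main(String[] args) {
        List<String> words = List.of("a", "b", "a");
        Map<String, Long> frequencies = new HashMap<>();

        // Before: frequencies.compute(word, (k, c) -> (c == null) ? 1 : c + 1);
        // After:
        words.forEach(word -> frequencies.merge(word, 1L, Long::sum));

        System.out.println(frequencies); // {a=2, b=1}
    }
}
```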
@@ -704,7 +704,7 @@ public class IndexResolver {
for (String concreteIndex : concreteIndices) {
if (aliases.containsKey(concreteIndex)) {
List<AliasMetadata> concreteIndexAliases = aliases.get(concreteIndex);
concreteIndexAliases.stream().forEach(e -> uniqueAliases.add(e.alias()));
concreteIndexAliases.forEach(e -> uniqueAliases.add(e.alias()));
}
}
concreteIndices.addAll(uniqueAliases);

@@ -8,12 +8,12 @@ package org.elasticsearch.xpack.ql.type;

import java.math.BigInteger;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Stream;

import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toUnmodifiableMap;

@@ -53,7 +53,7 @@ public final class DataTypes {
public static final DataType NESTED = new DataType("nested", 0, false, false, false);
//end::noformat

private static final Collection<DataType> TYPES = Arrays.asList(
private static final Collection<DataType> TYPES = Stream.of(
UNSUPPORTED,
NULL,
BOOLEAN,

@@ -74,7 +74,7 @@ public final class DataTypes {
BINARY,
OBJECT,
NESTED
).stream().sorted(Comparator.comparing(DataType::typeName)).toList();
).sorted(Comparator.comparing(DataType::typeName)).toList();

private static final Map<String, DataType> NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t));

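The `DataTypes` hunk above avoids building a list only to stream it: `Stream.of(...)` starts from the varargs directly. A hedged sketch of the same shape with invented type names:

```java
import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;

// Illustrative sketch: Stream.of(...) instead of Arrays.asList(...).stream().
public class StreamOfExample {
    public static void main(String[] args) {
        // Before: Arrays.asList("nested", "object", "binary").stream()...
        // After:
        List<String> sorted = Stream.of("nested", "object", "binary")
            .sorted(Comparator.naturalOrder())
            .toList();

        System.out.println(sorted); // [binary, nested, object]
    }
}
```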
@@ -209,7 +209,7 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
Set<RollupJobCaps> validatedCaps = new HashSet<>();
sourceAgg.getAggregatorFactories()
.forEach(agg -> validatedCaps.addAll(RollupJobIdentifierUtils.findBestJobs(agg, context.getJobCaps())));
List<String> jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).collect(Collectors.toList());
List<String> jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).toList();

for (AggregationBuilder agg : sourceAgg.getAggregatorFactories()) {

@@ -39,7 +39,6 @@ import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

@@ -343,7 +342,7 @@ public class PinnedQueryBuilder extends AbstractQueryBuilder<PinnedQueryBuilder>
if (idField == null) {
return new MatchNoDocsQuery("No mappings");
}
List<Item> items = (docs != null) ? docs : ids.stream().map(id -> new Item(id)).collect(Collectors.toList());
List<Item> items = (docs != null) ? docs : ids.stream().map(id -> new Item(id)).toList();
if (items.isEmpty()) {
return new CappedScoreQuery(organicQuery.toQuery(context), MAX_ORGANIC_SCORE);
} else {

@@ -94,7 +94,7 @@ public class SearchableSnapshotsStatsResponse extends BroadcastResponse {
.map(ShardRouting::index)
.sorted(Index.COMPARE_BY_NAME)
.distinct()
.collect(toList());
.toList();

for (Index index : indices) {
builder.startObject(index.getName());

@@ -117,7 +117,7 @@ public class SearchableSnapshotsStatsResponse extends BroadcastResponse {
List<SearchableSnapshotShardStats> listOfStats = getStats().stream()
.filter(dirStats -> dirStats.getShardRouting().index().equals(index))
.sorted(Comparator.comparingInt(dir -> dir.getShardRouting().getId()))
.collect(Collectors.toList());
.toList();

int minShard = listOfStats.stream()
.map(stat -> stat.getShardRouting().getId())
Some files were not shown because too many files have changed in this diff.