Cleanup Stream usage in various spots (#97306)

This cleans up a number of questionable Stream usages across the codebase: redundant stream creation (for example, calling `.stream()` on a collection just to `forEach` over it), collecting into an intermediate list only to add all of its elements to another collection, stream pipelines used merely to join strings, use of the less efficient `Collectors.toList()` where `Stream#toList()` suffices, and a few cases that incorrectly relied on the collected result being mutable. The recurring before/after shapes are condensed in the sketch after the commit metadata below.
Armin Braun 2023-07-03 14:24:57 +02:00 committed by GitHub
parent 5eeaecd9cf
commit 63e64ae61b
117 changed files with 321 additions and 442 deletions
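
A condensed, hedged sketch of the patterns this commit touches; the class, variable, and example values below are invented for illustration and do not come from the changed files:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class StreamCleanupPatterns {
    public static void main(String[] args) {
        List<String> words = List.of("a", "b", "c");

        // 1. Redundant stream creation: collection.stream().forEach(...) is just collection.forEach(...)
        words.stream().forEach(System.out::println); // before
        words.forEach(System.out::println);          // after

        // 2. Redundant collect before addAll: push elements straight into the target collection
        List<String> target = new ArrayList<>();
        target.addAll(words.stream().map(String::toUpperCase).collect(Collectors.toList())); // before
        words.stream().map(String::toUpperCase).forEach(target::add);                        // after

        // 3. Redundant stream pipeline just to join strings
        String joined = words.stream().collect(Collectors.joining(",")); // before
        joined = String.join(",", words);                                // after

        // 4. Collectors.toList() vs Stream.toList(): the latter is typically cheaper but returns an
        //    unmodifiable list, so it is only a safe swap where the result is never mutated afterwards
        //    (the "incorrectly relying on the result being mutable" cases).
        List<String> upper = words.stream().map(String::toUpperCase).toList();

        // 5. Redundant stream over a freshly built List: Stream.of(...) skips the intermediate List
        Stream<String> viaListOf = List.of("x", "y").stream(); // before
        Stream<String> direct = Stream.of("x", "y");           // after
        System.out.println(joined + " " + upper + " " + viaListOf.count() + direct.count());
    }
}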

View file

@@ -50,15 +50,11 @@ import java.io.Writer;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.stream.Collectors;
 import javax.inject.Inject;
 import java.io.Serializable;
-import javax.inject.Inject;

 /**
  * Checks files for license headers..
  */

@@ -193,9 +189,8 @@ public abstract class LicenseHeadersTask extends DefaultTask {
         boolean unApprovedLicenses = stats.getNumUnApproved() > 0;
         if (unknownLicenses || unApprovedLicenses) {
             getLogger().error("The following files contain unapproved license headers:");
-            unapprovedFiles(repFile).stream().forEachOrdered(unapprovedFile -> getLogger().error(unapprovedFile));
-            throw new GradleException("Check failed. License header problems were found. Full details: " +
-                repFile.getAbsolutePath());
+            unapprovedFiles(repFile).forEach(getLogger()::error);
+            throw new GradleException("Check failed. License header problems were found. Full details: " + repFile.getAbsolutePath());
         }
     }

View file

@@ -34,6 +34,7 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.inject.Inject;

@@ -287,7 +288,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
      */
     private Optional<String> getDockerPath() {
         // Check if the Docker binary exists
-        return List.of(DOCKER_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
+        return Stream.of(DOCKER_BINARIES).filter(path -> new File(path).exists()).findFirst();
     }

@@ -298,7 +299,7 @@ public abstract class DockerSupportService implements BuildService<DockerSupport
      */
     private Optional<String> getDockerComposePath() {
         // Check if the Docker binary exists
-        return List.of(DOCKER_COMPOSE_BINARIES).stream().filter(path -> new File(path).exists()).findFirst();
+        return Stream.of(DOCKER_COMPOSE_BINARIES).filter(path -> new File(path).exists()).findFirst();
     }

     private void throwDockerRequiredException(final String message) {

View file

@@ -114,24 +114,19 @@ public abstract class ForbiddenPatternsTask extends DefaultTask {
             } catch (UncheckedIOException e) {
                 throw new IllegalArgumentException("Failed to read " + f + " as UTF_8", e);
             }
-            List<Integer> invalidLines = IntStream.range(0, lines.size())
-                .filter(i -> allPatterns.matcher(lines.get(i)).find())
-                .boxed()
-                .collect(Collectors.toList());
             URI baseUri = getRootDir().orElse(projectLayout.getProjectDirectory().getAsFile()).get().toURI();
             String path = baseUri.relativize(f.toURI()).toString();
-            failures.addAll(
-                invalidLines.stream()
-                    .map(l -> new AbstractMap.SimpleEntry<>(l + 1, lines.get(l)))
-                    .flatMap(
-                        kv -> patterns.entrySet()
-                            .stream()
-                            .filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
-                            .map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
-                    )
-                    .collect(Collectors.toList())
-            );
+            IntStream.range(0, lines.size())
+                .filter(i -> allPatterns.matcher(lines.get(i)).find())
+                .mapToObj(l -> new AbstractMap.SimpleEntry<>(l + 1, lines.get(l)))
+                .flatMap(
+                    kv -> patterns.entrySet()
+                        .stream()
+                        .filter(p -> Pattern.compile(p.getValue()).matcher(kv.getValue()).find())
+                        .map(p -> "- " + p.getKey() + " on line " + kv.getKey() + " of " + path)
+                )
+                .forEach(failures::add);
         }
         if (failures.isEmpty() == false) {
             throw new GradleException("Found invalid patterns:\n" + String.join("\n", failures));

View file

@@ -159,8 +159,7 @@ public class JavaModulePrecommitTask extends PrecommitTask {
         return ModuleFinder.of(filePath.toPath())
             .findAll()
             .stream()
-            .sorted(Comparator.comparing(ModuleReference::descriptor))
-            .findFirst()
+            .min(Comparator.comparing(ModuleReference::descriptor))
             .orElseThrow(() -> new GradleException("module not found in " + filePath));
     }
 }
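
A side note on the `min`-for-`sorted().findFirst()` swap above: `min` keeps a single running minimum in one pass instead of sorting every element first. A minimal sketch with invented values:

import java.util.Comparator;
import java.util.Optional;
import java.util.stream.Stream;

class MinVsSortedFindFirst {
    public static void main(String[] args) {
        // before-shape: sorts all elements, then takes one
        Optional<String> viaSort = Stream.of("b", "c", "a").sorted(Comparator.naturalOrder()).findFirst();
        // after-shape: tracks one running minimum in a single pass
        Optional<String> viaMin = Stream.of("b", "c", "a").min(Comparator.naturalOrder());
        System.out.println(viaSort.equals(viaMin)); // true
    }
}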

View file

@@ -48,7 +48,6 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.function.Consumer;
-import java.util.stream.Collectors;
 import javax.inject.Inject;

@@ -234,7 +233,7 @@ public class SplitPackagesAuditTask extends DefaultTask {
         String lastPackageName = null;
         Set<String> currentClasses = null;
         boolean filterErrorsFound = false;
-        for (String fqcn : getParameters().getIgnoreClasses().get().stream().sorted().collect(Collectors.toList())) {
+        for (String fqcn : getParameters().getIgnoreClasses().get().stream().sorted().toList()) {
             int lastDot = fqcn.lastIndexOf('.');
             if (lastDot == -1) {
                 LOGGER.error("Missing package in classname in split package ignores: " + fqcn);

View file

@@ -38,6 +38,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import javax.inject.Inject;

@@ -128,7 +129,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
             var mismatchingBaseClasses = testClassesCandidate.stream()
                 .filter(testClassDefaultPredicate)
                 .filter(TestingConventionsCheckWorkAction::seemsLikeATest)
-                .collect(Collectors.toList());
+                .toList();
             if (mismatchingBaseClasses.isEmpty() == false) {
                 throw new GradleException(
                     "Following test classes do not extend any supported base class:\n\t"

@@ -141,7 +142,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
             // ensure base class matching do match suffix
             var matchingBaseClassNotMatchingSuffix = matchingBaseClass.stream()
                 .filter(c -> suffixes.stream().allMatch(s -> c.getName().endsWith(s) == false))
-                .collect(Collectors.toList());
+                .toList();
             if (matchingBaseClassNotMatchingSuffix.isEmpty() == false) {
                 throw new GradleException(
                     "Following test classes do not match naming convention to use suffix "

@@ -202,8 +203,7 @@ public abstract class TestingConventionsCheckTask extends PrecommitTask {
         }

         private static boolean isAnnotated(Method method, Class<?> annotation) {
-            return List.of(method.getAnnotations())
-                .stream()
+            return Stream.of(method.getAnnotations())
                 .anyMatch(presentAnnotation -> annotation.isAssignableFrom(presentAnnotation.getClass()));
         }

View file

@@ -389,12 +389,11 @@ public class DistroTestPlugin implements Plugin<Project> {
         List<ElasticsearchDistribution> currentDistros = new ArrayList<>();
         for (Architecture architecture : Architecture.values()) {
-            ALL_INTERNAL.stream()
-                .forEach(
-                    type -> currentDistros.add(
-                        createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch())
-                    )
-                );
+            ALL_INTERNAL.forEach(
+                type -> currentDistros.add(
+                    createDistro(distributions, architecture, type, null, true, VersionProperties.getElasticsearch())
+                )
+            );
         }
         for (Architecture architecture : Architecture.values()) {

View file

@@ -38,13 +38,13 @@ public class RestTestTransformer {
         List<RestTestTransformGlobalSetup> setupTransforms = transformations.stream()
             .filter(transform -> transform instanceof RestTestTransformGlobalSetup)
             .map(transform -> (RestTestTransformGlobalSetup) transform)
-            .collect(Collectors.toList());
+            .toList();

         // Collect any global teardown transformations
         List<RestTestTransformGlobalTeardown> teardownTransforms = transformations.stream()
             .filter(transform -> transform instanceof RestTestTransformGlobalTeardown)
             .map(transform -> (RestTestTransformGlobalTeardown) transform)
-            .collect(Collectors.toList());
+            .toList();

         // Collect any transformations that are identified by an object key.
         Map<String, List<RestTestTransformByParentObject>> objectKeyFinders = transformations.stream()

View file

@@ -75,9 +75,8 @@ public abstract class AdoptiumJdkToolchainResolver extends AbstractCustomJavaToo
             return Optional.of(
                 Lists.newArrayList(versionsNode.iterator())
                     .stream()
-                    .map(node -> toVersionInfo(node))
-                    .sorted(Comparator.comparing(AdoptiumVersionInfo::semver).reversed())
-                    .findFirst()
+                    .map(this::toVersionInfo)
+                    .max(Comparator.comparing(AdoptiumVersionInfo::semver))
                     .get()
             );
         } catch (FileNotFoundException e) {

View file

@@ -89,7 +89,7 @@ public class VagrantBasePlugin implements Plugin<Project> {
         }

         String version = matcher.group(1);
-        List<Integer> versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).collect(Collectors.toList());
+        List<Integer> versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).toList();
         for (int i = 0; i < minVersion.length; ++i) {
             int found = versionParts.get(i);
             if (found > minVersion[i]) {

View file

@@ -17,7 +17,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.function.UnaryOperator;
-import java.util.stream.Collectors;

 import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertLinuxPath;
 import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertWindowsPath;

@@ -71,7 +70,7 @@ public abstract class VagrantShellTask extends DefaultTask {
             script.add("try {");
             script.add("cd " + convertWindowsPath(buildLayout.getRootDirectory(), buildLayout.getRootDirectory().toString()));
             extension.getVmEnv().forEach((k, v) -> script.add("$Env:" + k + " = \"" + v + "\""));
-            script.addAll(getWindowsScript().stream().map(s -> " " + s).collect(Collectors.toList()));
+            script.addAll(getWindowsScript().stream().map(s -> " " + s).toList());
             script.addAll(
                 Arrays.asList(
                     " exit $LASTEXITCODE",

View file

@@ -16,7 +16,6 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Comparator;
 import java.util.List;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;

 /**

@@ -62,7 +61,7 @@ public class Reaper implements Closeable {
     private void reap() {
         try (Stream<Path> stream = Files.list(inputDir)) {
-            final List<Path> inputFiles = stream.filter(p -> p.getFileName().toString().endsWith(".cmd")).collect(Collectors.toList());
+            final List<Path> inputFiles = stream.filter(p -> p.getFileName().toString().endsWith(".cmd")).toList();
             for (Path inputFile : inputFiles) {
                 System.out.println("Process file: " + inputFile);

View file

@@ -56,7 +56,7 @@ public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implemen
     @Override
     public <T1> T1[] toArray(T1[] a) {
-        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).collect(Collectors.toList()).toArray(a);
+        return delegate.stream().peek(this::validate).map(PropertyListEntry::getValue).toList().toArray(a);
     }

@@ -79,7 +79,7 @@ public class LazyPropertyList<T> extends AbstractLazyPropertyCollection implemen
     @Override
     public boolean containsAll(Collection<?> c) {
-        return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toList()).containsAll(c);
+        return delegate.stream().map(PropertyListEntry::getValue).collect(Collectors.toSet()).containsAll(c);
     }

     @Override
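
The `containsAll` change above swaps the collected type, not just the collector: probing a `Set` is a hash lookup, while probing a `List` is a linear scan per element. A hedged sketch of the effect, with made-up data:

import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

class ContainsAllSketch {
    public static void main(String[] args) {
        List<Integer> values = IntStream.range(0, 100_000).boxed().toList();
        List<Integer> probes = List.of(5, 50_000, 99_999);

        // List.containsAll: one linear scan of values per probe, O(probes * values)
        boolean viaList = values.containsAll(probes);
        // Set.containsAll: one hash lookup per probe, O(values + probes) overall
        boolean viaSet = values.stream().collect(Collectors.toSet()).containsAll(probes);
        System.out.println(viaList == viaSet); // same answer, very different cost
    }
}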

View file

@@ -136,7 +136,7 @@ public class RunTask extends DefaultTestClustersTask {
                     entry -> entry.getValue().toString()
                 )
             );
-        boolean singleNode = getClusters().stream().flatMap(c -> c.getNodes().stream()).count() == 1;
+        boolean singleNode = getClusters().stream().mapToLong(c -> c.getNodes().size()).sum() == 1;
         final Function<ElasticsearchNode, Path> getDataPath;
         if (singleNode) {
             getDataPath = n -> dataDir;
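
The `singleNode` change above replaces a flattened stream built only to be counted with a sum of already-known sizes. A minimal sketch; the cluster and node names are invented:

import java.util.List;

class CountVsSum {
    public static void main(String[] args) {
        List<List<String>> clusters = List.of(List.of("node-0"), List.of("node-1", "node-2"));
        // before-shape: creates a sub-stream per cluster just to count elements
        long viaFlatMap = clusters.stream().flatMap(List::stream).count();
        // after-shape: sums the sizes the collections already know
        long viaSum = clusters.stream().mapToLong(List::size).sum();
        System.out.println(viaFlatMap == viaSum); // true
    }
}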

View file

@@ -26,7 +26,7 @@ public interface TestClustersAware extends Task {
         }
         cluster.getNodes()
-            .all(node -> node.getDistributions().stream().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
+            .all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze()))));
         cluster.getNodes().all(node -> dependsOn((Callable<Collection<Configuration>>) node::getPluginAndModuleConfigurations));
         getClusters().add(cluster);
     }

View file

@@ -18,7 +18,6 @@ import java.lang.management.GarbageCollectorMXBean;
 import java.lang.management.ManagementFactory;
 import java.util.Arrays;
 import java.util.List;
-import java.util.stream.Collectors;

 public abstract class AbstractBenchmark<T extends Closeable> {
     private static final int SEARCH_BENCHMARK_ITERATIONS = 10_000;

@@ -92,7 +91,7 @@ public abstract class AbstractBenchmark<T extends Closeable> {
         String benchmarkTargetHost = args[1];
         String indexName = args[2];
         String searchBody = args[3];
-        List<Integer> throughputRates = Arrays.asList(args[4].split(",")).stream().map(Integer::valueOf).collect(Collectors.toList());
+        List<Integer> throughputRates = Arrays.stream(args[4].split(",")).map(Integer::valueOf).toList();
         T client = client(benchmarkTargetHost);

View file

@@ -33,7 +33,6 @@ import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.util.Arrays;
 import java.util.Locale;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.zip.GZIPOutputStream;

@@ -74,7 +73,7 @@ public class GeoIpCli extends Command {
             return;
         }
         try (Stream<Path> files = Files.list(source)) {
-            for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).collect(Collectors.toList())) {
+            for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
                 Files.copy(path, target.resolve(path.getFileName()), StandardCopyOption.REPLACE_EXISTING);
             }
         }

@@ -82,7 +81,7 @@ public class GeoIpCli extends Command {
     private void packDatabasesToTgz(Terminal terminal, Path source, Path target) throws IOException {
         try (Stream<Path> files = Files.list(source)) {
-            for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".mmdb")).collect(Collectors.toList())) {
+            for (Path path : files.filter(p -> p.getFileName().toString().endsWith(".mmdb")).toList()) {
                 String fileName = path.getFileName().toString();
                 Path compressedPath = target.resolve(fileName.replaceAll("mmdb$", "") + "tgz");
                 terminal.println("Found " + fileName + ", will compress it to " + compressedPath.getFileName());

@@ -111,7 +110,7 @@ public class GeoIpCli extends Command {
             XContentGenerator generator = XContentType.JSON.xContent().createGenerator(os)
         ) {
             generator.writeStartArray();
-            for (Path db : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).collect(Collectors.toList())) {
+            for (Path db : files.filter(p -> p.getFileName().toString().endsWith(".tgz")).toList()) {
                 terminal.println("Adding " + db.getFileName() + " to overview.json");
                 MessageDigest md5 = MessageDigests.md5();
                 try (InputStream dis = new DigestInputStream(new BufferedInputStream(Files.newInputStream(db)), md5)) {

View file

@@ -10,11 +10,12 @@ package org.elasticsearch.server.cli;
 import java.util.List;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;

 final class SystemJvmOptions {

     static List<String> systemJvmOptions() {
-        return List.of(
+        return Stream.of(
             /*
              * Cache ttl in seconds for positive DNS lookups noting that this overrides the JDK security property networkaddress.cache.ttl;
              * can be set to -1 to cache forever.

@@ -61,7 +62,7 @@ final class SystemJvmOptions {
              */
             "--add-opens=java.base/java.io=org.elasticsearch.preallocate",
             maybeOverrideDockerCgroup()
-        ).stream().filter(e -> e.isEmpty() == false).collect(Collectors.toList());
+        ).filter(e -> e.isEmpty() == false).collect(Collectors.toList());
     }

     /*
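
Note that the `SystemJvmOptions` hunk keeps `Collectors.toList()` and only drops the intermediate `List.of(...)` that existed solely to be streamed again. A hedged sketch of that shape, with invented JVM flags:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class StreamOfVsListOfStream {
    public static void main(String[] args) {
        // before-shape: materializes an immutable List just to stream it again
        List<String> a = List.of("-Xms1g", "", "-Xmx1g").stream().filter(s -> s.isEmpty() == false).collect(Collectors.toList());
        // after-shape: streams the varargs directly, no intermediate List allocation
        List<String> b = Stream.of("-Xms1g", "", "-Xmx1g").filter(s -> s.isEmpty() == false).collect(Collectors.toList());
        System.out.println(a.equals(b)); // true
    }
}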

View file

@@ -108,7 +108,7 @@ public final class EmbeddedImplClassLoader extends SecureClassLoader {
             .collect(toUnmodifiableMap(k -> k.getKey().prefix(), Map.Entry::getValue));
         Map<String, JarMeta> map = new HashMap<>();
         for (var jarMeta : prefixToCodeBase.keySet()) {
-            jarMeta.packages().stream().forEach(pkg -> {
+            jarMeta.packages().forEach(pkg -> {
                 var prev = map.put(pkg, jarMeta);
                 assert prev == null;
             });

View file

@@ -72,8 +72,8 @@ class InMemoryModuleFinder implements ModuleFinder {
         md.requires().stream().filter(req -> missingModules.contains(req.name()) == false).forEach(builder::requires);
         md.exports().forEach(builder::exports);
         md.opens().forEach(builder::opens);
-        md.provides().stream().forEach(builder::provides);
-        md.uses().stream().forEach(builder::uses);
+        md.provides().forEach(builder::provides);
+        md.uses().forEach(builder::uses);
         builder.packages(md.packages());
         return builder.build();
     }

View file

@@ -129,7 +129,6 @@ public final class DissectParser {
         Set<String> appendKeyNames = dissectPairs.stream()
             .filter(dissectPair -> APPEND_MODIFIERS.contains(dissectPair.key().getModifier()))
             .map(KEY_NAME)
-            .distinct()
             .collect(Collectors.toSet());
         if (appendKeyNames.size() > 0) {
             List<DissectPair> modifiedMatchPairs = new ArrayList<>(dissectPairs.size());

View file

@@ -11,7 +11,6 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Locale;
 import java.util.Map;
-import java.util.stream.Collectors;

 import javax.net.ssl.SSLParameters;

@@ -82,7 +81,7 @@ public enum SslClientAuthenticationMode {
     public static SslClientAuthenticationMode parse(String value) {
         final SslClientAuthenticationMode mode = LOOKUP.get(value.toLowerCase(Locale.ROOT));
         if (mode == null) {
-            final String allowedValues = LOOKUP.keySet().stream().collect(Collectors.joining(","));
+            final String allowedValues = String.join(",", LOOKUP.keySet());
             throw new SslConfigException(
                 "could not resolve ssl client authentication, unknown value [" + value + "], recognised values are [" + allowedValues + "]"
             );

View file

@@ -236,9 +236,7 @@ public class SslDiagnostics {
                 if (hostnames.isEmpty()) {
                     message.append("; the certificate does not have any DNS/IP subject alternative names");
                 } else {
-                    message.append("; the certificate has subject alternative names [")
-                        .append(hostnames.stream().collect(Collectors.joining(",")))
-                        .append("]");
+                    message.append("; the certificate has subject alternative names [").append(String.join(",", hostnames)).append("]");
                 }
             }
         } catch (CertificateParsingException e) {

View file

@@ -11,7 +11,6 @@ import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.Locale;
 import java.util.Map;
-import java.util.stream.Collectors;

 /**
  * Represents the verification mode to be used for SSL connections.

@@ -84,7 +83,7 @@ public enum SslVerificationMode {
     public static SslVerificationMode parse(String value) {
         final SslVerificationMode mode = LOOKUP.get(value.toLowerCase(Locale.ROOT));
         if (mode == null) {
-            final String allowedValues = LOOKUP.keySet().stream().collect(Collectors.joining(","));
+            final String allowedValues = String.join(",", LOOKUP.keySet());
             throw new SslConfigException(
                 "could not resolve ssl client verification mode, unknown value ["
                     + value
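
Both SSL enum hunks rely on `String.join` accepting any `Iterable` of `CharSequence`, so no stream pipeline is needed when the elements are joined as-is. A minimal sketch with invented values:

import java.util.List;
import java.util.stream.Collectors;

class JoinSketch {
    public static void main(String[] args) {
        List<String> modes = List.of("none", "certificate", "full");
        // before-shape: a whole stream pipeline just to concatenate
        String viaStream = modes.stream().collect(Collectors.joining(","));
        // after-shape: String.join consumes the Iterable directly
        String viaJoin = String.join(",", modes);
        System.out.println(viaStream.equals(viaJoin)); // true
    }
}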

View file

@@ -1072,7 +1072,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
     private Map<String, Integer> bucketCountsAsMap(InternalAutoDateHistogram result) {
         Map<String, Integer> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
-        result.getBuckets().stream().forEach(b -> {
+        result.getBuckets().forEach(b -> {
             Object old = map.put(b.getKeyAsString(), Math.toIntExact(b.getDocCount()));
             assertNull(old);
         });

@@ -1081,7 +1081,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes
     private Map<String, Double> maxAsMap(InternalAutoDateHistogram result) {
         Map<String, Double> map = Maps.newLinkedHashMapWithExpectedSize(result.getBuckets().size());
-        result.getBuckets().stream().forEach(b -> {
+        result.getBuckets().forEach(b -> {
             Max max = b.getAggregations().get("max");
             Object old = map.put(b.getKeyAsString(), max.value());
             assertNull(old);

View file

@@ -670,14 +670,10 @@ public class UserTreeToXContent extends UserTreeBaseVisitor<ScriptScope> {
         if (decorations.isEmpty() == false) {
             builder.startArray(Fields.DECORATIONS);
-            List<Class<? extends Decoration>> dkeys = decorations.keySet()
-                .stream()
-                .sorted(Comparator.comparing(Class::getName))
-                .collect(Collectors.toList());
-            for (Class<? extends Decoration> dkey : dkeys) {
-                DecorationToXContent.ToXContent(decorations.get(dkey), builder);
-            }
+            decorations.keySet()
+                .stream()
+                .sorted(Comparator.comparing(Class::getName))
+                .forEachOrdered(dkey -> DecorationToXContent.ToXContent(decorations.get(dkey), builder));
             builder.endArray();
         }
     }

View file

@@ -45,7 +45,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;

 /**
  * A {@link FieldMapper} that creates hierarchical joins (parent-join) between documents in the same index.

@@ -324,7 +323,7 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             .map(mappingLookup::getFieldType)
             .filter(ft -> ft instanceof JoinFieldType)
             .map(MappedFieldType::name)
-            .collect(Collectors.toList());
+            .toList();
         if (joinFields.size() > 1) {
             throw new IllegalArgumentException("Only one [parent-join] field can be defined per index, got " + joinFields);
         }

View file

@@ -125,8 +125,7 @@ final class QueryAnalyzer {
                 partialResults.addAll(terms);
             }
             if (children.isEmpty() == false) {
-                List<Result> childResults = children.stream().map(ResultBuilder::getResult).collect(Collectors.toList());
-                partialResults.addAll(childResults);
+                children.stream().map(ResultBuilder::getResult).forEach(partialResults::add);
             }
             if (partialResults.isEmpty()) {
                 return verified ? Result.MATCH_NONE : Result.UNKNOWN;

@@ -243,7 +242,7 @@
         }
     }

     private static Result handleConjunction(List<Result> conjunctionsWithUnknowns) {
-        List<Result> conjunctions = conjunctionsWithUnknowns.stream().filter(r -> r.isUnknown() == false).collect(Collectors.toList());
+        List<Result> conjunctions = conjunctionsWithUnknowns.stream().filter(r -> r.isUnknown() == false).toList();
         if (conjunctions.isEmpty()) {
             if (conjunctionsWithUnknowns.isEmpty()) {
                 throw new IllegalArgumentException("Must have at least one conjunction sub result");

View file

@@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
-import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
 import java.util.OptionalInt;

@@ -133,8 +132,10 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
         double idcg = 0;

         if (normalize) {
-            List<Integer> allRatings = ratedDocs.stream().mapToInt(RatedDocument::getRating).boxed().collect(Collectors.toList());
-            Collections.sort(allRatings, Comparator.nullsLast(Collections.reverseOrder()));
+            List<Integer> allRatings = ratedDocs.stream()
+                .map(RatedDocument::getRating)
+                .sorted(Collections.reverseOrder())
+                .collect(Collectors.toList());
             idcg = computeDCG(allRatings.subList(0, Math.min(ratingsInSearchHits.size(), allRatings.size())));
             if (idcg != 0) {
                 result = dcg / idcg;

View file

@@ -126,7 +126,7 @@ public class TransportNodesReloadSecureSettingsAction extends TransportNodesActi
         final Settings settingsWithKeystore = Settings.builder().put(environment.settings(), false).setSecureSettings(keystore).build();
         final List<Exception> exceptions = new ArrayList<>();
         // broadcast the new settings object (with the open embedded keystore) to all reloadable plugins
-        pluginsService.filterPlugins(ReloadablePlugin.class).stream().forEach(p -> {
+        pluginsService.filterPlugins(ReloadablePlugin.class).forEach(p -> {
             try {
                 p.reload(settingsWithKeystore);
             } catch (final Exception e) {

View file

@@ -24,7 +24,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -166,10 +165,10 @@ public class ReservedComposableIndexTemplateAction
         }

         Set<String> composableEntities = composables.stream().map(r -> reservedComposableIndexName(r.name())).collect(Collectors.toSet());
-        Set<String> composablesToDelete = new HashSet<>(
-            prevState.keys().stream().filter(k -> k.startsWith(COMPOSABLE_PREFIX)).collect(Collectors.toSet())
-        );
-        composablesToDelete.removeAll(composableEntities);
+        Set<String> composablesToDelete = prevState.keys()
+            .stream()
+            .filter(k -> k.startsWith(COMPOSABLE_PREFIX) && composableEntities.contains(k) == false)
+            .collect(Collectors.toSet());

         // 3. delete composable index templates (this will fail on attached data streams, unless we added a higher priority one)
         if (composablesToDelete.isEmpty() == false) {

@@ -183,9 +182,7 @@ public class ReservedComposableIndexTemplateAction
         }

         Set<String> componentEntities = components.stream().map(r -> reservedComponentName(r.name())).collect(Collectors.toSet());
-        Set<String> componentsToDelete = new HashSet<>(
-            prevState.keys().stream().filter(k -> k.startsWith(COMPONENT_PREFIX)).collect(Collectors.toSet())
-        );
+        Set<String> componentsToDelete = prevState.keys().stream().filter(k -> k.startsWith(COMPONENT_PREFIX)).collect(Collectors.toSet());
         componentsToDelete.removeAll(componentEntities);

         // 5. delete component templates (this will check if there are any related composable index templates and fail)

View file

@@ -17,8 +17,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
-
-import static java.util.stream.Collectors.toList;

 public class SuggestingErrorOnUnknown implements ErrorOnUnknown {
     @Override
     public String errorMessage(String parserName, String unknownField, Iterable<String> candidates) {

@@ -55,7 +53,7 @@ public class SuggestingErrorOnUnknown implements ErrorOnUnknown {
             }
             return a.v2().compareTo(b.v2());
         });
-        List<String> keys = scored.stream().map(Tuple::v2).collect(toList());
+        List<String> keys = scored.stream().map(Tuple::v2).toList();
         StringBuilder builder = new StringBuilder(" did you mean ");
         if (keys.size() == 1) {
             builder.append("[").append(keys.get(0)).append("]");

View file

@@ -97,7 +97,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
      * @param values The terms
      */
     public TermsQueryBuilder(String fieldName, int... values) {
-        this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
+        this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
     }

     /**
@@ -107,7 +107,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
      * @param values The terms
      */
     public TermsQueryBuilder(String fieldName, long... values) {
-        this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
+        this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
     }

     /**
@@ -117,7 +117,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
      * @param values The terms
      */
     public TermsQueryBuilder(String fieldName, float... values) {
-        this(fieldName, values != null ? IntStream.range(0, values.length).mapToObj(i -> values[i]).toList() : (Iterable<?>) null);
+        this(fieldName, values != null ? IntStream.range(0, values.length).mapToObj(i -> values[i]).toList() : null);
     }

     /**
@@ -127,7 +127,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
      * @param values The terms
      */
     public TermsQueryBuilder(String fieldName, double... values) {
-        this(fieldName, values != null ? Arrays.stream(values).mapToObj(s -> s).toList() : (Iterable<?>) null);
+        this(fieldName, values != null ? Arrays.stream(values).boxed().toList() : null);
    }

     /**
@@ -137,7 +137,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
      * @param values The terms
      */
     public TermsQueryBuilder(String fieldName, Object... values) {
-        this(fieldName, values != null ? Arrays.asList(values) : (Iterable<?>) null);
+        this(fieldName, values != null ? Arrays.asList(values) : null);
     }

     /**
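
On the `boxed()` swaps above: `Arrays.stream(int[])` yields an `IntStream`, and `boxed()` is the idiomatic way to obtain a `Stream<Integer>` from it, whereas `mapToObj(s -> s)` did the same thing via implicit autoboxing. A minimal sketch:

import java.util.Arrays;
import java.util.List;

class BoxedSketch {
    public static void main(String[] args) {
        int[] values = { 3, 1, 2 };
        // mapToObj(s -> s) autoboxes each int implicitly
        List<Integer> viaMapToObj = Arrays.stream(values).mapToObj(s -> s).toList();
        // boxed() states the intent directly
        List<Integer> viaBoxed = Arrays.stream(values).boxed().toList();
        System.out.println(viaMapToObj.equals(viaBoxed)); // true
    }
}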

View file

@@ -110,7 +110,7 @@ public class ShardFieldUsageTracker {
         @Override
         public void close() {
-            usages.entrySet().stream().forEach(e -> {
+            usages.entrySet().forEach(e -> {
                 InternalFieldStats fieldStats = perFieldStats.computeIfAbsent(e.getKey(), f -> new InternalFieldStats());
                 PerField pf = e.getValue();
                 boolean any = false;

View file

@@ -59,7 +59,6 @@ import java.util.function.LongConsumer;
 import java.util.function.LongSupplier;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;

 import static org.elasticsearch.core.Strings.format;

@@ -375,7 +374,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
                     return false;
                 }
             }). // find all inner callers including Translog subclasses
-                collect(Collectors.toList());
+                toList();
         // the list of inner callers should be either empty or should contain closeOnTragicEvent method
         return frames.isEmpty() || frames.stream().anyMatch(f -> f.getMethodName().equals("closeOnTragicEvent"));
     }

View file

@@ -73,8 +73,8 @@ public class ModuleSupport {
         Predicate<String> isPackageInParentLayers
     ) throws IOException {
         var builder = ModuleDescriptor.newOpenModule(name); // open module, for now
-        requires.stream().forEach(builder::requires);
-        uses.stream().forEach(builder::uses);
+        requires.forEach(builder::requires);
+        uses.forEach(builder::uses);
         // scan the names of the entries in the JARs
         Set<String> pkgs = new HashSet<>();

View file

@@ -30,8 +30,6 @@ import java.util.List;
 import java.util.Objects;
 import java.util.function.ToLongFunction;
-
-import static java.util.stream.Collectors.toList;

 /**
  * Implementations for {@link Bucket} ordering strategies.
  */

@@ -187,7 +185,7 @@ public abstract class InternalOrder extends BucketOrder {
         public <T extends Bucket> Comparator<T> partiallyBuiltBucketComparator(ToLongFunction<T> ordinalReader, Aggregator aggregator) {
             List<Comparator<T>> comparators = orderElements.stream()
                 .map(oe -> oe.partiallyBuiltBucketComparator(ordinalReader, aggregator))
-                .collect(toList());
+                .toList();
             return (lhs, rhs) -> {
                 for (Comparator<T> c : comparators) {
                     int result = c.compare(lhs, rhs);

@@ -201,7 +199,7 @@ public abstract class InternalOrder extends BucketOrder {
         @Override
         public Comparator<Bucket> comparator() {
-            List<Comparator<Bucket>> comparators = orderElements.stream().map(BucketOrder::comparator).collect(toList());
+            List<Comparator<Bucket>> comparators = orderElements.stream().map(BucketOrder::comparator).toList();
             return (lhs, rhs) -> {
                 for (Comparator<Bucket> c : comparators) {
                     int result = c.compare(lhs, rhs);

@@ -217,7 +215,7 @@ public abstract class InternalOrder extends BucketOrder {
         Comparator<DelayedBucket<? extends Bucket>> delayedBucketComparator() {
             List<Comparator<DelayedBucket<? extends Bucket>>> comparators = orderElements.stream()
                 .map(BucketOrder::delayedBucketComparator)
-                .collect(toList());
+                .toList();
             return (lhs, rhs) -> {
                 for (Comparator<DelayedBucket<? extends Bucket>> c : comparators) {
                     int result = c.compare(lhs, rhs);

View file

@@ -21,7 +21,7 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Set;

-import static java.util.stream.Collectors.toSet;
+import static java.util.stream.Collectors.toUnmodifiableSet;

 public final class QueryRescorer implements Rescorer {

@@ -57,9 +57,7 @@ public final class QueryRescorer implements Rescorer {
         TopDocs topNFirstPass = topN(topDocs, rescoreContext.getWindowSize());
         // Save doc IDs for which rescoring was applied to be used in score explanation
-        Set<Integer> topNDocIDs = Collections.unmodifiableSet(
-            Arrays.stream(topNFirstPass.scoreDocs).map(scoreDoc -> scoreDoc.doc).collect(toSet())
-        );
+        Set<Integer> topNDocIDs = Arrays.stream(topNFirstPass.scoreDocs).map(scoreDoc -> scoreDoc.doc).collect(toUnmodifiableSet());
         rescoreContext.setRescoredDocs(topNDocIDs);

         // Rescore them:

View file

@@ -250,13 +250,13 @@ public class DiscoveryNodesTests extends ESTestCase {
         DiscoveryNode masterB = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesB);

         DiscoveryNodes.Builder builderA = DiscoveryNodes.builder();
-        nodesA.stream().forEach(builderA::add);
+        nodesA.forEach(builderA::add);
         final String masterAId = masterA == null ? null : masterA.getId();
         builderA.masterNodeId(masterAId);
         builderA.localNodeId(RandomPicks.randomFrom(random(), nodesA).getId());

         DiscoveryNodes.Builder builderB = DiscoveryNodes.builder();
-        nodesB.stream().forEach(builderB::add);
+        nodesB.forEach(builderB::add);
         final String masterBId = masterB == null ? null : masterB.getId();
         builderB.masterNodeId(masterBId);
         builderB.localNodeId(RandomPicks.randomFrom(random(), nodesB).getId());

View file

@@ -158,9 +158,9 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
             // Pick a random subset of primaries to fail
             List<FailedShard> shardsToFail = new ArrayList<>();
             List<ShardRouting> failedPrimaries = randomSubsetOf(primaries);
-            failedPrimaries.stream().forEach(sr -> {
-                shardsToFail.add(new FailedShard(randomFrom(sr), "failed primary", new Exception(), randomBoolean()));
-            });
+            failedPrimaries.forEach(
+                sr -> shardsToFail.add(new FailedShard(randomFrom(sr), "failed primary", new Exception(), randomBoolean()))
+            );

             logger.info("--> state before failing shards: {}", state);
             state = cluster.applyFailedShards(state, shardsToFail);

@@ -173,7 +173,7 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
                 Version newPrimaryVersion = getNodeVersion(newPrimary, compareState);
                 logger.info("--> new primary is on version {}: {}", newPrimaryVersion, newPrimary);
-                compareState.routingTable().shardRoutingTable(newPrimary.shardId()).shardsWithState(STARTED).stream().forEach(sr -> {
+                compareState.routingTable().shardRoutingTable(newPrimary.shardId()).shardsWithState(STARTED).forEach(sr -> {
                     Version candidateVer = getNodeVersion(sr, compareState);
                     if (candidateVer != null) {
                         logger.info("--> candidate on {} node; shard routing: {}", candidateVer, sr);

View file

@@ -82,7 +82,7 @@ public class RangeAggregationBuilderTests extends AbstractXContentSerializingTes
             default -> fail();
         }
         RangeAggregationBuilder mutant = new RangeAggregationBuilder(name).keyed(keyed).field(field);
-        ranges.stream().forEach(mutant::addRange);
+        ranges.forEach(mutant::addRange);
         return mutant;
     }

View file

@@ -34,7 +34,6 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import java.util.stream.Collectors;

 import static org.elasticsearch.repositories.azure.AzureFixtureHelper.assertValidBlockId;

@@ -77,7 +76,7 @@ public class AzureHttpHandler implements HttpHandler {
                 final List<String> blockIds = Arrays.stream(blockList.split("<Latest>"))
                     .filter(line -> line.contains("</Latest>"))
                     .map(line -> line.substring(0, line.indexOf("</Latest>")))
-                    .collect(Collectors.toList());
+                    .toList();

                 final ByteArrayOutputStream blob = new ByteArrayOutputStream();
                 for (String blockId : blockIds) {

View file

@@ -187,7 +187,7 @@ public class CoordinationStateTestCluster {
         this.electionStrategy = electionStrategy;
         messages = new ArrayList<>();
-        clusterNodes = nodes.stream().map(node -> new ClusterNode(node, electionStrategy)).collect(Collectors.toList());
+        clusterNodes = nodes.stream().map(node -> new ClusterNode(node, electionStrategy)).toList();

         initialConfiguration = randomVotingConfig();
         initialValue = randomLong();

@@ -200,7 +200,7 @@ public class CoordinationStateTestCluster {
     }

     void broadcast(DiscoveryNode sourceNode, Object payload) {
-        messages.addAll(clusterNodes.stream().map(cn -> new Message(sourceNode, cn.localNode, payload)).collect(Collectors.toList()));
+        clusterNodes.stream().map(cn -> new Message(sourceNode, cn.localNode, payload)).forEach(messages::add);
     }

     Optional<ClusterNode> getNode(DiscoveryNode node) {

@@ -251,9 +251,7 @@ public class CoordinationStateTestCluster {
         } else if (rarely() && rarely()) {
             randomFrom(clusterNodes).reboot();
         } else if (rarely()) {
-            final List<ClusterNode> masterNodes = clusterNodes.stream()
-                .filter(cn -> cn.state.electionWon())
-                .collect(Collectors.toList());
+            final List<ClusterNode> masterNodes = clusterNodes.stream().filter(cn -> cn.state.electionWon()).toList();
             if (masterNodes.isEmpty() == false) {
                 final ClusterNode clusterNode = randomFrom(masterNodes);
                 final long term = rarely() ? randomLongBetween(0, maxTerm + 1) : clusterNode.state.getCurrentTerm();

View file

@@ -187,11 +187,7 @@
     }

     private static void assertIndexUUIDs(BlobStoreRepository repository, RepositoryData repositoryData) throws IOException {
-        final List<String> expectedIndexUUIDs = repositoryData.getIndices()
-            .values()
-            .stream()
-            .map(IndexId::getId)
-            .collect(Collectors.toList());
+        final List<String> expectedIndexUUIDs = repositoryData.getIndices().values().stream().map(IndexId::getId).toList();
         final BlobContainer indicesContainer = repository.blobContainer().children().get("indices");
         final List<String> foundIndexUUIDs;
         if (indicesContainer == null) {

@@ -231,7 +227,7 @@
     ) throws IOException {
         final BlobContainer repoRoot = repository.blobContainer();
         final Collection<SnapshotId> snapshotIds = repositoryData.getSnapshotIds();
-        final List<String> expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).collect(Collectors.toList());
+        final List<String> expectedSnapshotUUIDs = snapshotIds.stream().map(SnapshotId::getUUID).toList();
         for (String prefix : new String[] { BlobStoreRepository.SNAPSHOT_PREFIX, BlobStoreRepository.METADATA_PREFIX }) {
             final Collection<String> foundSnapshotUUIDs = repoRoot.listBlobs()
                 .keySet()

View file

@@ -60,7 +60,6 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Collectors;

 public class MockRepository extends FsRepository {
     private static final Logger logger = LogManager.getLogger(MockRepository.class);

@@ -517,7 +516,7 @@ public class MockRepository extends FsRepository {
             final Map<String, BlobMetadata> blobs = listBlobs();
             long deleteBlobCount = blobs.size();
             long deleteByteCount = 0L;
-            for (String blob : blobs.values().stream().map(BlobMetadata::name).collect(Collectors.toList())) {
+            for (String blob : blobs.values().stream().map(BlobMetadata::name).toList()) {
                 maybeIOExceptionOrBlock(blob);
                 deleteBlobsIgnoringIfNotExists(Iterators.single(blob));
                 deleteByteCount += blobs.get(blob).length();

View file

@@ -219,7 +219,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase {
     protected List<String> filteredWarnings() {
         return Stream.concat(
             super.filteredWarnings().stream(),
-            List.of("[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release.").stream()
+            Stream.of("[index.data_path] setting was deprecated in Elasticsearch and will be removed in a future release.")
         ).collect(Collectors.toList());
     }

View file

@@ -192,7 +192,7 @@ public class VersionUtils {

     /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */
     public static Version randomCompatibleVersion(Random random, Version version) {
-        final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).collect(Collectors.toList());
+        final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).toList();
         return compatible.get(random.nextInt(compatible.size()));
     }

@@ -228,10 +228,7 @@ public class VersionUtils {
     /** Returns the maximum {@link Version} that is compatible with the given version. */
     public static Version maxCompatibleVersion(Version version) {
-        final List<Version> compatible = ALL_VERSIONS.stream()
-            .filter(version::isCompatible)
-            .filter(version::onOrBefore)
-            .collect(Collectors.toList());
+        final List<Version> compatible = ALL_VERSIONS.stream().filter(version::isCompatible).filter(version::onOrBefore).toList();
         assert compatible.size() > 0;
         return compatible.get(compatible.size() - 1);
     }

View file

@ -839,13 +839,13 @@ public abstract class ESRestTestCase extends ESTestCase {
if (hasIlm && false == preserveILMPoliciesUponCompletion()) { if (hasIlm && false == preserveILMPoliciesUponCompletion()) {
Set<String> unexpectedIlmPlicies = getAllUnexpectedIlmPolicies(preserveILMPolicyIds()); Set<String> unexpectedIlmPlicies = getAllUnexpectedIlmPolicies(preserveILMPolicyIds());
assertTrue( assertTrue(
"Expected no ILM policies after deletions, but found " + unexpectedIlmPlicies.stream().collect(Collectors.joining(", ")), "Expected no ILM policies after deletions, but found " + String.join(", ", unexpectedIlmPlicies),
unexpectedIlmPlicies.isEmpty() unexpectedIlmPlicies.isEmpty()
); );
} }
Set<String> unexpectedTemplates = getAllUnexpectedTemplates(); Set<String> unexpectedTemplates = getAllUnexpectedTemplates();
assertTrue( assertTrue(
"Expected no templates after deletions, but found " + unexpectedTemplates.stream().collect(Collectors.joining(", ")), "Expected no templates after deletions, but found " + String.join(", ", unexpectedTemplates),
unexpectedTemplates.isEmpty() unexpectedTemplates.isEmpty()
); );
} }
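
Note: `String.join(delimiter, collection)` is the direct form when the elements are already strings and need no per-element transformation; `stream().collect(Collectors.joining(...))` only pays off when a `map` step is involved. A sketch with illustrative policy names:

    import java.util.Set;
    import java.util.stream.Collectors;

    class JoinDemo {
        public static void main(String[] args) {
            Set<String> policies = Set.of("policy-a", "policy-b");
            String direct = String.join(", ", policies);                             // no stream needed
            String viaStream = policies.stream().collect(Collectors.joining(", ")); // same output, more machinery
            System.out.println(direct.equals(viaStream)); // true
        }
    }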
@ -893,12 +893,10 @@ public abstract class ESRestTestCase extends ESTestCase {
Request compReq = new Request("GET", "_component_template"); Request compReq = new Request("GET", "_component_template");
String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity()); String componentTemplates = EntityUtils.toString(adminClient().performRequest(compReq).getEntity());
Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false); Map<String, Object> cTemplates = XContentHelper.convertToMap(JsonXContent.jsonXContent, componentTemplates, false);
unexpectedTemplates.addAll( ((List<?>) cTemplates.get("component_templates")).stream()
((List<?>) cTemplates.get("component_templates")).stream() .map(ct -> (String) ((Map<?, ?>) ct).get("name"))
.map(ct -> (String) ((Map<?, ?>) ct).get("name")) .filter(name -> isXPackTemplate(name) == false)
.filter(name -> isXPackTemplate(name) == false) .forEach(unexpectedTemplates::add);
.collect(Collectors.toList())
);
} }
// Always check for legacy templates: // Always check for legacy templates:
Request getLegacyTemplatesRequest = new Request("GET", "_template"); Request getLegacyTemplatesRequest = new Request("GET", "_template");
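
Note on the component-template hunk above: pushing stream results straight into the target collection with `forEach(collection::add)` removes the intermediate list that `collect(Collectors.toList())` plus `addAll` built and immediately discarded. A sketch with hypothetical template names and a hypothetical "not built-in" check:

    import java.util.ArrayList;
    import java.util.List;

    class CollectIntoDemo {
        public static void main(String[] args) {
            List<String> unexpected = new ArrayList<>();
            List<String> names = List.of(".internal-template", "user-template");
            names.stream()
                .filter(name -> name.startsWith(".") == false) // hypothetical filter
                .forEach(unexpected::add);                     // no throwaway intermediate list
            System.out.println(unexpected); // [user-template]
        }
    }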

View file

@ -61,7 +61,6 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.SortedSet; import java.util.SortedSet;
import java.util.TreeSet; import java.util.TreeSet;
import java.util.stream.Collectors;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
@ -326,7 +325,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
} }
filesSet.add(file); filesSet.add(file);
List<String> fileNames = filesSet.stream().map(p -> p.getFileName().toString()).collect(Collectors.toList()); List<String> fileNames = filesSet.stream().map(p -> p.getFileName().toString()).toList();
if (Collections.frequency(fileNames, file.getFileName().toString()) > 1) { if (Collections.frequency(fileNames, file.getFileName().toString()) > 1) {
Logger logger = LogManager.getLogger(ESClientYamlSuiteTestCase.class); Logger logger = LogManager.getLogger(ESClientYamlSuiteTestCase.class);
logger.warn( logger.warn(

View file

@ -76,7 +76,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
List<TDigestState> values = buckets.stream() List<TDigestState> values = buckets.stream()
.map(b -> resolveTDigestBucketValue(histo, b, bucketsPaths()[0])) .map(b -> resolveTDigestBucketValue(histo, b, bucketsPaths()[0]))
.filter(v -> v != null) .filter(v -> v != null)
.collect(Collectors.toList()); .toList();
int index = 0; int index = 0;
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
@ -126,7 +126,7 @@ public class MovingPercentilesPipelineAggregator extends PipelineAggregator {
List<DoubleHistogram> values = buckets.stream() List<DoubleHistogram> values = buckets.stream()
.map(b -> resolveHDRBucketValue(histo, b, bucketsPaths()[0])) .map(b -> resolveHDRBucketValue(histo, b, bucketsPaths()[0]))
.filter(v -> v != null) .filter(v -> v != null)
.collect(Collectors.toList()); .toList();
int index = 0; int index = 0;
for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) { for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {

View file

@ -31,7 +31,6 @@ import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
@ -342,7 +341,7 @@ public class XPackInfoResponse extends ActionResponse implements ToXContentObjec
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();
List<String> names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); List<String> names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().toList();
for (String name : names) { for (String name : names) {
builder.field(name, featureSets.get(name), params); builder.field(name, featureSets.get(name), params);
} }

View file

@ -16,7 +16,6 @@ import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction;
import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask;
import java.util.List; import java.util.List;
import java.util.stream.Collectors;
final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep { final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep {
@ -46,7 +45,7 @@ final class PauseFollowerIndexStep extends AbstractUnfollowIndexStep {
ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams(); ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTask.getParams();
return shardFollowTask.getFollowShardId().getIndexName().equals(followerIndex); return shardFollowTask.getFollowShardId().getIndexName().equals(followerIndex);
}) })
.collect(Collectors.toList()); .toList();
if (shardFollowTasks.isEmpty()) { if (shardFollowTasks.isEmpty()) {
listener.onResponse(null); listener.onResponse(null);

View file

@ -156,7 +156,7 @@ public final class PhaseCacheManagement {
.stream() .stream()
.filter(meta -> newPolicy.getName().equals(meta.getLifecyclePolicyName())) .filter(meta -> newPolicy.getName().equals(meta.getLifecyclePolicyName()))
.filter(meta -> isIndexPhaseDefinitionUpdatable(xContentRegistry, client, meta, newPolicy.getPolicy(), licenseState)) .filter(meta -> isIndexPhaseDefinitionUpdatable(xContentRegistry, client, meta, newPolicy.getPolicy(), licenseState))
.collect(Collectors.toList()); .toList();
final List<String> refreshedIndices = new ArrayList<>(indicesThatCanBeUpdated.size()); final List<String> refreshedIndices = new ArrayList<>(indicesThatCanBeUpdated.size());
for (IndexMetadata index : indicesThatCanBeUpdated) { for (IndexMetadata index : indicesThatCanBeUpdated) {

View file

@ -79,7 +79,7 @@ public class SegmentCountStep extends AsyncWaitStep {
.stream() .stream()
.flatMap(iss -> Arrays.stream(iss.shards())) .flatMap(iss -> Arrays.stream(iss.shards()))
.filter(shardSegments -> shardSegments.getSegments().size() > maxNumSegments) .filter(shardSegments -> shardSegments.getSegments().size() > maxNumSegments)
.collect(Collectors.toList()); .toList();
if (unmergedShards.size() > 0) { if (unmergedShards.size() > 0) {
Map<ShardRouting, Integer> unmergedShardCounts = unmergedShards.stream() Map<ShardRouting, Integer> unmergedShardCounts = unmergedShards.stream()
.collect(Collectors.toMap(ShardSegments::getShardRouting, ss -> ss.getSegments().size())); .collect(Collectors.toMap(ShardSegments::getShardRouting, ss -> ss.getSegments().size()));

View file

@ -63,7 +63,7 @@ final class WaitForFollowShardTasksStep extends AsyncWaitStep {
.stream() .stream()
.map(FollowStatsAction.StatsResponse::status) .map(FollowStatsAction.StatsResponse::status)
.filter(shardFollowStatus -> shardFollowStatus.leaderGlobalCheckpoint() != shardFollowStatus.followerGlobalCheckpoint()) .filter(shardFollowStatus -> shardFollowStatus.leaderGlobalCheckpoint() != shardFollowStatus.followerGlobalCheckpoint())
.collect(Collectors.toList()); .toList();
// Follow stats api needs to return stats for follower index and all shard follow tasks should be synced: // Follow stats api needs to return stats for follower index and all shard follow tasks should be synced:
boolean conditionMet = responses.getStatsResponses().size() > 0 && unSyncedShardFollowStatuses.isEmpty(); boolean conditionMet = responses.getStatsResponses().size() > 0 && unSyncedShardFollowStatuses.isEmpty();

View file

@ -20,7 +20,6 @@ import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream; import java.util.stream.IntStream;
public final class InferenceHelpers { public final class InferenceHelpers {
@ -68,7 +67,7 @@ public final class InferenceHelpers {
List<String> labels = classificationLabels == null ? List<String> labels = classificationLabels == null ?
// If we don't have the labels we should return the top classification values anyways, they will just be numeric // If we don't have the labels we should return the top classification values anyways, they will just be numeric
IntStream.range(0, probabilities.length).boxed().map(String::valueOf).collect(Collectors.toList()) : classificationLabels; IntStream.range(0, probabilities.length).mapToObj(String::valueOf).toList() : classificationLabels;
int count = numToInclude < 0 ? probabilities.length : Math.min(numToInclude, probabilities.length); int count = numToInclude < 0 ? probabilities.length : Math.min(numToInclude, probabilities.length);
List<TopClassEntry> topClassEntries = new ArrayList<>(count); List<TopClassEntry> topClassEntries = new ArrayList<>(count);

View file

@ -200,10 +200,9 @@ public class Detector implements ToXContentObject, Writeable {
* ", \ * ", \
*/ */
public static final Character[] PROHIBITED_FIELDNAME_CHARACTERS = { '"', '\\' }; public static final Character[] PROHIBITED_FIELDNAME_CHARACTERS = { '"', '\\' };
public static final String PROHIBITED = String.join( public static final String PROHIBITED = Arrays.stream(PROHIBITED_FIELDNAME_CHARACTERS)
",", .map(c -> Character.toString(c))
Arrays.stream(PROHIBITED_FIELDNAME_CHARACTERS).map(c -> Character.toString(c)).collect(Collectors.toList()) .collect(Collectors.joining(","));
);
private final String detectorDescription; private final String detectorDescription;
private final DetectorFunction function; private final DetectorFunction function;
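
Note: this hunk goes the opposite way from the `String.join` cleanups. When each element must be transformed first, `Collectors.joining` is the direct form, since `String.join` would need a materialized list of already-converted strings. A sketch:

    import java.util.Arrays;
    import java.util.stream.Collectors;

    class JoiningDemo {
        public static void main(String[] args) {
            Character[] prohibited = { '"', '\\' };
            String joined = Arrays.stream(prohibited)
                .map(Object::toString)             // per-element transform forces a stream anyway
                .collect(Collectors.joining(","));
            System.out.println(joined); // ",\
        }
    }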

View file

@ -18,7 +18,6 @@ import java.util.Set;
import java.util.SortedSet; import java.util.SortedSet;
import java.util.TreeSet; import java.util.TreeSet;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
/** /**
* Expands an expression into the set of matching names. * Expands an expression into the set of matching names.
@ -92,7 +91,7 @@ public abstract class NameResolver {
.filter(key -> Regex.simpleMatch(token, key)) .filter(key -> Regex.simpleMatch(token, key))
.map(this::lookup) .map(this::lookup)
.flatMap(List::stream) .flatMap(List::stream)
.collect(Collectors.toList()); .toList();
if (expanded.isEmpty() && allowNoMatch == false) { if (expanded.isEmpty() && allowNoMatch == false) {
throw notFoundExceptionSupplier.apply(token); throw notFoundExceptionSupplier.apply(token);
} }

View file

@ -14,11 +14,9 @@ import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
/** /**
* Represents the rollup capabilities of a non-rollup index. E.g. what values/aggregations * Represents the rollup capabilities of a non-rollup index. E.g. what values/aggregations
@ -35,9 +33,7 @@ public class RollableIndexCaps implements Writeable, ToXContentObject {
public RollableIndexCaps(String indexName, List<RollupJobCaps> caps) { public RollableIndexCaps(String indexName, List<RollupJobCaps> caps) {
this.indexName = indexName; this.indexName = indexName;
this.jobCaps = Collections.unmodifiableList( this.jobCaps = caps.stream().sorted(Comparator.comparing(RollupJobCaps::getJobID)).toList();
Objects.requireNonNull(caps).stream().sorted(Comparator.comparing(RollupJobCaps::getJobID)).collect(Collectors.toList())
);
} }
public RollableIndexCaps(StreamInput in) throws IOException { public RollableIndexCaps(StreamInput in) throws IOException {
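
Note: since `Stream.toList()` already yields an unmodifiable list, the old `Collections.unmodifiableList(...)` wrapper was redundant; and dropping `Objects.requireNonNull(caps)` loses nothing, because calling `caps.stream()` on a null reference fails just as fast. A sketch with an illustrative record standing in for the caps type:

    import java.util.Comparator;
    import java.util.List;

    class SortedUnmodifiableDemo {
        record JobCaps(String jobId) {}

        public static void main(String[] args) {
            List<JobCaps> caps = List.of(new JobCaps("b"), new JobCaps("a"));
            List<JobCaps> sorted = caps.stream()
                .sorted(Comparator.comparing(JobCaps::jobId))
                .toList(); // already unmodifiable: no Collections.unmodifiableList needed
            System.out.println(sorted.get(0).jobId()); // a
        }
    }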

View file

@ -190,7 +190,7 @@ public class RollupJobCaps implements Writeable, ToXContentObject {
final List<Map<String, Object>> metrics = metricConfig.getMetrics() final List<Map<String, Object>> metrics = metricConfig.getMetrics()
.stream() .stream()
.map(metric -> singletonMap("agg", (Object) metric)) .map(metric -> singletonMap("agg", (Object) metric))
.collect(Collectors.toList()); .toList();
metrics.forEach(m -> { metrics.forEach(m -> {
List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(metricConfig.getField(), new ArrayList<>()); List<Map<String, Object>> caps = tempFieldCaps.getOrDefault(metricConfig.getField(), new ArrayList<>());
caps.add(m); caps.add(m);

View file

@ -27,7 +27,6 @@ import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Locale; import java.util.Locale;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
@ -71,7 +70,7 @@ public class MountSearchableSnapshotRequest extends MasterNodeRequest<MountSearc
PARSER.declareField(optionalConstructorArg(), Settings::fromXContent, INDEX_SETTINGS_FIELD, ObjectParser.ValueType.OBJECT); PARSER.declareField(optionalConstructorArg(), Settings::fromXContent, INDEX_SETTINGS_FIELD, ObjectParser.ValueType.OBJECT);
PARSER.declareField( PARSER.declareField(
optionalConstructorArg(), optionalConstructorArg(),
p -> p.list().stream().map(s -> (String) s).collect(Collectors.toList()).toArray(Strings.EMPTY_ARRAY), p -> p.list().stream().map(s -> (String) s).toArray(String[]::new),
IGNORE_INDEX_SETTINGS_FIELD, IGNORE_INDEX_SETTINGS_FIELD,
ObjectParser.ValueType.STRING_ARRAY ObjectParser.ValueType.STRING_ARRAY
); );
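
Note: `Stream.toArray(String[]::new)` builds the array in one pass; the old chain collected into a `List` and then copied that list into an array. A sketch with illustrative raw parser output:

    import java.util.List;

    class ToArrayDemo {
        public static void main(String[] args) {
            List<Object> raw = List.of("index.refresh_interval", "index.translog.durability");
            String[] ignored = raw.stream()
                .map(s -> (String) s)
                .toArray(String[]::new); // no intermediate List, no second copy
            System.out.println(ignored.length); // 2
        }
    }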

View file

@ -24,7 +24,6 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableList;
import static java.util.stream.Collectors.toList;
public class SearchableSnapshotShardStats implements Writeable, ToXContentObject { public class SearchableSnapshotShardStats implements Writeable, ToXContentObject {
@ -97,7 +96,7 @@ public class SearchableSnapshotShardStats implements Writeable, ToXContentObject
{ {
List<CacheIndexInputStats> stats = inputStats.stream() List<CacheIndexInputStats> stats = inputStats.stream()
.sorted(Comparator.comparing(CacheIndexInputStats::getFileExt)) .sorted(Comparator.comparing(CacheIndexInputStats::getFileExt))
.collect(toList()); .toList();
for (CacheIndexInputStats stat : stats) { for (CacheIndexInputStats stat : stats) {
stat.toXContent(builder, params); stat.toXContent(builder, params);
} }

View file

@ -111,9 +111,7 @@ public final class PutPrivilegesRequest extends ActionRequest implements Applica
@Override @Override
public Collection<String> getApplicationNames() { public Collection<String> getApplicationNames() {
return Collections.unmodifiableSet( return privileges.stream().map(ApplicationPrivilegeDescriptor::getApplication).collect(Collectors.toUnmodifiableSet());
privileges.stream().map(ApplicationPrivilegeDescriptor::getApplication).collect(Collectors.toSet())
);
} }
@Override @Override

View file

@ -1393,10 +1393,10 @@ public final class Authentication implements ToXContentObject {
final Map<String, Object> roleDescriptorsMap = convertRoleDescriptorsBytesToMap(roleDescriptorsBytes); final Map<String, Object> roleDescriptorsMap = convertRoleDescriptorsBytesToMap(roleDescriptorsBytes);
final AtomicBoolean removedAtLeastOne = new AtomicBoolean(false); final AtomicBoolean removedAtLeastOne = new AtomicBoolean(false);
roleDescriptorsMap.entrySet().stream().forEach(entry -> { roleDescriptorsMap.forEach((key, value) -> {
if (entry.getValue() instanceof Map) { if (value instanceof Map) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Map<String, Object> roleDescriptor = (Map<String, Object>) entry.getValue(); Map<String, Object> roleDescriptor = (Map<String, Object>) value;
boolean removed = roleDescriptor.remove(RoleDescriptor.Fields.REMOTE_INDICES.getPreferredName()) != null; boolean removed = roleDescriptor.remove(RoleDescriptor.Fields.REMOTE_INDICES.getPreferredName()) != null;
if (removed) { if (removed) {
removedAtLeastOne.set(true); removedAtLeastOne.set(true);
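
Note: `Map.forEach((key, value) -> ...)` hands both halves of each entry to the lambda, so the `entrySet().stream()` detour and the `entry.getKey()`/`entry.getValue()` unpacking disappear. A sketch with a hypothetical descriptors map:

    import java.util.Map;
    import java.util.concurrent.atomic.AtomicBoolean;

    class MapForEachDemo {
        public static void main(String[] args) {
            Map<String, Object> descriptors = Map.of("role-a", Map.of("remote_indices", 1), "role-b", "opaque");
            AtomicBoolean sawMap = new AtomicBoolean(false);
            descriptors.forEach((key, value) -> { // no entrySet().stream() detour
                if (value instanceof Map) {
                    sawMap.set(true);
                }
            });
            System.out.println(sawMap.get()); // true
        }
    }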

View file

@ -186,12 +186,12 @@ public class DefaultAuthenticationFailureHandler implements AuthenticationFailur
ese = authenticationError(message, t, args); ese = authenticationError(message, t, args);
containsNegotiateWithToken = false; containsNegotiateWithToken = false;
} }
defaultFailureResponseHeaders.entrySet().stream().forEach((e) -> { defaultFailureResponseHeaders.forEach((key, value) -> {
if (containsNegotiateWithToken && e.getKey().equalsIgnoreCase("WWW-Authenticate")) { if (containsNegotiateWithToken && key.equalsIgnoreCase("WWW-Authenticate")) {
return; return;
} }
// If it is already present then it will replace the existing header. // If it is already present then it will replace the existing header.
ese.addHeader(e.getKey(), e.getValue()); ese.addHeader(key, value);
}); });
return ese; return ese;
} }

View file

@ -632,8 +632,7 @@ public interface AuthorizationEngine {
|| Arrays.equals(IndicesAndAliasesResolverField.NO_INDICES_OR_ALIASES_ARRAY, indices)) { || Arrays.equals(IndicesAndAliasesResolverField.NO_INDICES_OR_ALIASES_ARRAY, indices)) {
return null; return null;
} }
Set<String> deniedIndices = Arrays.asList(indices) Set<String> deniedIndices = Arrays.stream(indices)
.stream()
.filter(index -> false == indicesAccessControl.hasIndexPermissions(index)) .filter(index -> false == indicesAccessControl.hasIndexPermissions(index))
.collect(Collectors.toSet()); .collect(Collectors.toSet());
return getFailureDescription(deniedIndices, restrictedIndices); return getFailureDescription(deniedIndices, restrictedIndices);
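
Note: `Arrays.stream(array)` streams the array directly; `Arrays.asList(array).stream()` first wraps it in a `List` view that exists only to be streamed. A sketch with illustrative index names and a hypothetical permission check:

    import java.util.Arrays;
    import java.util.Set;
    import java.util.stream.Collectors;

    class ArraysStreamDemo {
        public static void main(String[] args) {
            String[] indices = { "logs-1", "secrets-1" };
            Set<String> denied = Arrays.stream(indices)            // no throwaway List view
                .filter(index -> index.startsWith("secrets-"))     // hypothetical check
                .collect(Collectors.toSet());
            System.out.println(denied); // [secrets-1]
        }
    }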

View file

@ -74,9 +74,7 @@ public final class ResourcePrivilegesMap {
public Builder addResourcePrivilegesMap(ResourcePrivilegesMap resourcePrivilegesMap) { public Builder addResourcePrivilegesMap(ResourcePrivilegesMap resourcePrivilegesMap) {
resourcePrivilegesMap.getResourceToResourcePrivileges() resourcePrivilegesMap.getResourceToResourcePrivileges()
.entrySet() .forEach((key, value) -> this.addResourcePrivilege(key, value.getPrivileges()));
.stream()
.forEach(e -> this.addResourcePrivilege(e.getKey(), e.getValue().getPrivileges()));
return this; return this;
} }

View file

@ -230,7 +230,7 @@ public class SnapshotLifecycleStats implements Writeable, ToXContentObject {
List<SnapshotPolicyStats> metrics = getMetrics().values() List<SnapshotPolicyStats> metrics = getMetrics().values()
.stream() .stream()
.sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing .sorted(Comparator.comparing(SnapshotPolicyStats::getPolicyId)) // maintain a consistent order when serializing
.collect(Collectors.toList()); .toList();
long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum(); long totalTaken = metrics.stream().mapToLong(s -> s.snapshotsTaken.count()).sum();
long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum(); long totalFailed = metrics.stream().mapToLong(s -> s.snapshotsFailed.count()).sum();
long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum(); long totalDeleted = metrics.stream().mapToLong(s -> s.snapshotsDeleted.count()).sum();

View file

@ -31,7 +31,6 @@ import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.function.LongSupplier; import java.util.function.LongSupplier;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.core.Strings.format;
@ -132,9 +131,7 @@ public class SnapshotRetentionConfiguration implements ToXContentObject, Writeab
*/ */
public Predicate<SnapshotInfo> getSnapshotDeletionPredicate(final List<SnapshotInfo> allSnapshots) { public Predicate<SnapshotInfo> getSnapshotDeletionPredicate(final List<SnapshotInfo> allSnapshots) {
final int totalSnapshotCount = allSnapshots.size(); final int totalSnapshotCount = allSnapshots.size();
final List<SnapshotInfo> sortedSnapshots = allSnapshots.stream() final List<SnapshotInfo> sortedSnapshots = allSnapshots.stream().sorted(Comparator.comparingLong(SnapshotInfo::startTime)).toList();
.sorted(Comparator.comparingLong(SnapshotInfo::startTime))
.collect(Collectors.toList());
int successCount = 0; int successCount = 0;
long latestSuccessfulTimestamp = Long.MIN_VALUE; long latestSuccessfulTimestamp = Long.MIN_VALUE;
for (SnapshotInfo snapshot : allSnapshots) { for (SnapshotInfo snapshot : allSnapshots) {

View file

@ -407,9 +407,10 @@ public class SSLService {
} }
public Set<String> getTransportProfileContextNames() { public Set<String> getTransportProfileContextNames() {
return Collections.unmodifiableSet( return this.sslConfigurations.keySet()
this.sslConfigurations.keySet().stream().filter(k -> k.startsWith("transport.profiles.")).collect(Collectors.toSet()) .stream()
); .filter(k -> k.startsWith("transport.profiles."))
.collect(Collectors.toUnmodifiableSet());
} }
/** /**
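
Note: `Collectors.toUnmodifiableSet()` collects straight into an unmodifiable set, folding the old `Collections.unmodifiableSet(collect(toSet()))` two-step into the collector itself. A sketch with illustrative configuration keys:

    import java.util.Set;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    class UnmodifiableSetDemo {
        public static void main(String[] args) {
            Set<String> profiles = Stream.of("transport.profiles.client", "http.port")
                .filter(k -> k.startsWith("transport.profiles."))
                .collect(Collectors.toUnmodifiableSet()); // one step, guaranteed unmodifiable
            System.out.println(profiles); // [transport.profiles.client]
            // profiles.add("x") would throw UnsupportedOperationException
        }
    }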

View file

@ -208,8 +208,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
AllocationService allocationService, AllocationService allocationService,
IndicesService indicesService IndicesService indicesService
) { ) {
List<Object> components = new ArrayList<>(); List<Object> components = new ArrayList<>(
components.addAll(
super.createComponents( super.createComponents(
client, client,
clusterService, clusterService,
@ -228,68 +227,61 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
) )
); );
filterPlugins(Plugin.class).stream() filterPlugins(Plugin.class).forEach(
.forEach( p -> components.addAll(
p -> components.addAll( p.createComponents(
p.createComponents( client,
client, clusterService,
clusterService, threadPool,
threadPool, resourceWatcherService,
resourceWatcherService, scriptService,
scriptService, xContentRegistry,
xContentRegistry, environment,
environment, nodeEnvironment,
nodeEnvironment, namedWriteableRegistry,
namedWriteableRegistry, expressionResolver,
expressionResolver, repositoriesServiceSupplier,
repositoriesServiceSupplier, tracer,
tracer, allocationService,
allocationService, indicesService
indicesService
)
) )
); )
);
return components; return components;
} }
@Override @Override
public Collection<RestHeaderDefinition> getRestHeaders() { public Collection<RestHeaderDefinition> getRestHeaders() {
List<RestHeaderDefinition> headers = new ArrayList<>(); List<RestHeaderDefinition> headers = new ArrayList<>(super.getRestHeaders());
headers.addAll(super.getRestHeaders()); filterPlugins(ActionPlugin.class).forEach(p -> headers.addAll(p.getRestHeaders()));
filterPlugins(ActionPlugin.class).stream().forEach(p -> headers.addAll(p.getRestHeaders()));
return headers; return headers;
} }
@Override @Override
public List<Setting<?>> getSettings() { public List<Setting<?>> getSettings() {
ArrayList<Setting<?>> settings = new ArrayList<>(); ArrayList<Setting<?>> settings = new ArrayList<>(super.getSettings());
settings.addAll(super.getSettings()); filterPlugins(Plugin.class).forEach(p -> settings.addAll(p.getSettings()));
filterPlugins(Plugin.class).stream().forEach(p -> settings.addAll(p.getSettings()));
return settings; return settings;
} }
@Override @Override
public List<String> getSettingsFilter() { public List<String> getSettingsFilter() {
List<String> filters = new ArrayList<>(); List<String> filters = new ArrayList<>(super.getSettingsFilter());
filters.addAll(super.getSettingsFilter()); filterPlugins(Plugin.class).forEach(p -> filters.addAll(p.getSettingsFilter()));
filterPlugins(Plugin.class).stream().forEach(p -> filters.addAll(p.getSettingsFilter()));
return filters; return filters;
} }
@Override @Override
public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>(); List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> actions = new ArrayList<>(super.getActions());
actions.addAll(super.getActions()); filterPlugins(ActionPlugin.class).forEach(p -> actions.addAll(p.getActions()));
filterPlugins(ActionPlugin.class).stream().forEach(p -> actions.addAll(p.getActions()));
return actions; return actions;
} }
@Override @Override
public List<ActionFilter> getActionFilters() { public List<ActionFilter> getActionFilters() {
List<ActionFilter> filters = new ArrayList<>(); List<ActionFilter> filters = new ArrayList<>(super.getActionFilters());
filters.addAll(super.getActionFilters()); filterPlugins(ActionPlugin.class).forEach(p -> filters.addAll(p.getActionFilters()));
filterPlugins(ActionPlugin.class).stream().forEach(p -> filters.addAll(p.getActionFilters()));
return filters; return filters;
} }
@ -303,8 +295,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
IndexNameExpressionResolver indexNameExpressionResolver, IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster Supplier<DiscoveryNodes> nodesInCluster
) { ) {
List<RestHandler> handlers = new ArrayList<>(); List<RestHandler> handlers = new ArrayList<>(
handlers.addAll(
super.getRestHandlers( super.getRestHandlers(
settings, settings,
restController, restController,
@ -315,27 +306,25 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
nodesInCluster nodesInCluster
) )
); );
filterPlugins(ActionPlugin.class).stream() filterPlugins(ActionPlugin.class).forEach(
.forEach( p -> handlers.addAll(
p -> handlers.addAll( p.getRestHandlers(
p.getRestHandlers( settings,
settings, restController,
restController, clusterSettings,
clusterSettings, indexScopedSettings,
indexScopedSettings, settingsFilter,
settingsFilter, indexNameExpressionResolver,
indexNameExpressionResolver, nodesInCluster
nodesInCluster
)
) )
); )
);
return handlers; return handlers;
} }
@Override @Override
public List<NamedWriteableRegistry.Entry> getNamedWriteables() { public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(super.getNamedWriteables());
entries.addAll(super.getNamedWriteables());
for (Plugin p : plugins) { for (Plugin p : plugins) {
entries.addAll(p.getNamedWriteables()); entries.addAll(p.getNamedWriteables());
} }
@ -344,8 +333,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override @Override
public List<NamedXContentRegistry.Entry> getNamedXContent() { public List<NamedXContentRegistry.Entry> getNamedXContent() {
List<NamedXContentRegistry.Entry> entries = new ArrayList<>(); List<NamedXContentRegistry.Entry> entries = new ArrayList<>(super.getNamedXContent());
entries.addAll(super.getNamedXContent());
for (Plugin p : plugins) { for (Plugin p : plugins) {
entries.addAll(p.getNamedXContent()); entries.addAll(p.getNamedXContent());
} }
@ -358,29 +346,30 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
public Settings additionalSettings() { public Settings additionalSettings() {
Settings.Builder builder = Settings.builder(); Settings.Builder builder = Settings.builder();
builder.put(super.additionalSettings()); builder.put(super.additionalSettings());
filterPlugins(Plugin.class).stream().forEach(p -> builder.put(p.additionalSettings())); filterPlugins(Plugin.class).forEach(p -> builder.put(p.additionalSettings()));
return builder.build(); return builder.build();
} }
@Override @Override
public List<ScriptContext<?>> getContexts() { public List<ScriptContext<?>> getContexts() {
List<ScriptContext<?>> contexts = new ArrayList<>(); List<ScriptContext<?>> contexts = new ArrayList<>();
filterPlugins(ScriptPlugin.class).stream().forEach(p -> contexts.addAll(p.getContexts())); filterPlugins(ScriptPlugin.class).forEach(p -> contexts.addAll(p.getContexts()));
return contexts; return contexts;
} }
@Override @Override
public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) { public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
Map<String, Processor.Factory> processors = new HashMap<>(); Map<String, Processor.Factory> processors = new HashMap<>();
filterPlugins(IngestPlugin.class).stream().forEach(p -> processors.putAll(p.getProcessors(parameters))); filterPlugins(IngestPlugin.class).forEach(p -> processors.putAll(p.getProcessors(parameters)));
return processors; return processors;
} }
@Override @Override
public List<TransportInterceptor> getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) { public List<TransportInterceptor> getTransportInterceptors(NamedWriteableRegistry namedWriteableRegistry, ThreadContext threadContext) {
List<TransportInterceptor> interceptors = new ArrayList<>(); List<TransportInterceptor> interceptors = new ArrayList<>();
filterPlugins(NetworkPlugin.class).stream() filterPlugins(NetworkPlugin.class).forEach(
.forEach(p -> interceptors.addAll(p.getTransportInterceptors(namedWriteableRegistry, threadContext))); p -> interceptors.addAll(p.getTransportInterceptors(namedWriteableRegistry, threadContext))
);
return interceptors; return interceptors;
} }
@ -393,16 +382,14 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
NamedWriteableRegistry namedWriteableRegistry, NamedWriteableRegistry namedWriteableRegistry,
NetworkService networkService NetworkService networkService
) { ) {
Map<String, Supplier<Transport>> transports = new HashMap<>(); Map<String, Supplier<Transport>> transports = new HashMap<>(
transports.putAll(
super.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService) super.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService)
); );
filterPlugins(NetworkPlugin.class).stream() filterPlugins(NetworkPlugin.class).forEach(
.forEach( p -> transports.putAll(
p -> transports.putAll( p.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService)
p.getTransports(settings, threadPool, pageCacheRecycler, circuitBreakerService, namedWriteableRegistry, networkService) )
) );
);
return transports; return transports;
} }
@ -422,31 +409,30 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
Tracer tracer Tracer tracer
) { ) {
Map<String, Supplier<HttpServerTransport>> transports = new HashMap<>(); Map<String, Supplier<HttpServerTransport>> transports = new HashMap<>();
filterPlugins(NetworkPlugin.class).stream() filterPlugins(NetworkPlugin.class).forEach(
.forEach( p -> transports.putAll(
p -> transports.putAll( p.getHttpTransports(
p.getHttpTransports( settings,
settings, threadPool,
threadPool, bigArrays,
bigArrays, pageCacheRecycler,
pageCacheRecycler, circuitBreakerService,
circuitBreakerService, xContentRegistry,
xContentRegistry, networkService,
networkService, dispatcher,
dispatcher, perRequestThreadContext,
perRequestThreadContext, clusterSettings,
clusterSettings, tracer
tracer
)
) )
); )
);
return transports; return transports;
} }
@Override @Override
public List<BootstrapCheck> getBootstrapChecks() { public List<BootstrapCheck> getBootstrapChecks() {
List<BootstrapCheck> checks = new ArrayList<>(); List<BootstrapCheck> checks = new ArrayList<>();
filterPlugins(Plugin.class).stream().forEach(p -> checks.addAll(p.getBootstrapChecks())); filterPlugins(Plugin.class).forEach(p -> checks.addAll(p.getBootstrapChecks()));
return Collections.unmodifiableList(checks); return Collections.unmodifiableList(checks);
} }
@ -459,7 +445,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
.map(RestInterceptorActionPlugin.class::cast) .map(RestInterceptorActionPlugin.class::cast)
.map(p -> p.getRestHandlerInterceptor(threadContext)) .map(p -> p.getRestHandlerInterceptor(threadContext))
.filter(Objects::nonNull) .filter(Objects::nonNull)
.collect(Collectors.toList()); .toList();
if (items.size() > 1) { if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security ActionPlugin should override this"); throw new UnsupportedOperationException("Only the security ActionPlugin should override this");
@ -473,7 +459,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override @Override
public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) { public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) {
List<ExecutorBuilder<?>> builders = new ArrayList<>(); List<ExecutorBuilder<?>> builders = new ArrayList<>();
filterPlugins(Plugin.class).stream().forEach(p -> builders.addAll(p.getExecutorBuilders(settings))); filterPlugins(Plugin.class).forEach(p -> builders.addAll(p.getExecutorBuilders(settings)));
return builders; return builders;
} }
@ -490,14 +476,14 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override @Override
public Map<String, ElectionStrategy> getElectionStrategies() { public Map<String, ElectionStrategy> getElectionStrategies() {
Map<String, ElectionStrategy> electionStrategies = new HashMap<>(); Map<String, ElectionStrategy> electionStrategies = new HashMap<>();
filterPlugins(ClusterCoordinationPlugin.class).stream().forEach(p -> electionStrategies.putAll(p.getElectionStrategies())); filterPlugins(ClusterCoordinationPlugin.class).forEach(p -> electionStrategies.putAll(p.getElectionStrategies()));
return electionStrategies; return electionStrategies;
} }
@Override @Override
public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) { public Collection<IndexSettingProvider> getAdditionalIndexSettingProviders(IndexSettingProvider.Parameters parameters) {
Set<IndexSettingProvider> providers = new HashSet<>(); Set<IndexSettingProvider> providers = new HashSet<>();
filterPlugins(Plugin.class).stream().forEach(p -> providers.addAll(p.getAdditionalIndexSettingProviders(parameters))); filterPlugins(Plugin.class).forEach(p -> providers.addAll(p.getAdditionalIndexSettingProviders(parameters)));
providers.addAll(super.getAdditionalIndexSettingProviders(parameters)); providers.addAll(super.getAdditionalIndexSettingProviders(parameters));
return providers; return providers;
@ -506,21 +492,19 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override @Override
public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() { public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers = new HashMap<>(); Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> tokenizers = new HashMap<>();
filterPlugins(AnalysisPlugin.class).stream().forEach(p -> tokenizers.putAll(p.getTokenizers())); filterPlugins(AnalysisPlugin.class).forEach(p -> tokenizers.putAll(p.getTokenizers()));
return tokenizers; return tokenizers;
} }
@Override @Override
public void onIndexModule(IndexModule indexModule) { public void onIndexModule(IndexModule indexModule) {
super.onIndexModule(indexModule); super.onIndexModule(indexModule);
filterPlugins(Plugin.class).stream().forEach(p -> p.onIndexModule(indexModule)); filterPlugins(Plugin.class).forEach(p -> p.onIndexModule(indexModule));
} }
@Override @Override
public Function<String, Predicate<String>> getFieldFilter() { public Function<String, Predicate<String>> getFieldFilter() {
List<Function<String, Predicate<String>>> items = filterPlugins(MapperPlugin.class).stream() List<Function<String, Predicate<String>>> items = filterPlugins(MapperPlugin.class).stream().map(p -> p.getFieldFilter()).toList();
.map(p -> p.getFieldFilter())
.collect(Collectors.toList());
if (items.size() > 1) { if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security MapperPlugin should override this"); throw new UnsupportedOperationException("Only the security MapperPlugin should override this");
} else if (items.size() == 1) { } else if (items.size() == 1) {
@ -536,7 +520,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
// There can be only one. // There can be only one.
List<BiConsumer<DiscoveryNode, ClusterState>> items = filterPlugins(ClusterCoordinationPlugin.class).stream() List<BiConsumer<DiscoveryNode, ClusterState>> items = filterPlugins(ClusterCoordinationPlugin.class).stream()
.map(p -> p.getJoinValidator()) .map(p -> p.getJoinValidator())
.collect(Collectors.toList()); .toList();
if (items.size() > 1) { if (items.size() > 1) {
throw new UnsupportedOperationException("Only the security DiscoveryPlugin should override this"); throw new UnsupportedOperationException("Only the security DiscoveryPlugin should override this");
} else if (items.size() == 1) { } else if (items.size() == 1) {
@ -598,7 +582,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
List<BiConsumer<Snapshot, IndexVersion>> checks = filterPlugins(RepositoryPlugin.class).stream() List<BiConsumer<Snapshot, IndexVersion>> checks = filterPlugins(RepositoryPlugin.class).stream()
.map(RepositoryPlugin::addPreRestoreVersionCheck) .map(RepositoryPlugin::addPreRestoreVersionCheck)
.filter(Objects::nonNull) .filter(Objects::nonNull)
.collect(Collectors.toList()); .toList();
return checks.isEmpty() ? null : (s, v) -> checks.forEach(c -> c.accept(s, v)); return checks.isEmpty() ? null : (s, v) -> checks.forEach(c -> c.accept(s, v));
} }
@ -611,7 +595,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
public Optional<EngineFactory> getEngineFactory(IndexSettings indexSettings) { public Optional<EngineFactory> getEngineFactory(IndexSettings indexSettings) {
List<Optional<EngineFactory>> enginePlugins = filterPlugins(EnginePlugin.class).stream() List<Optional<EngineFactory>> enginePlugins = filterPlugins(EnginePlugin.class).stream()
.map(p -> p.getEngineFactory(indexSettings)) .map(p -> p.getEngineFactory(indexSettings))
.collect(Collectors.toList()); .toList();
if (enginePlugins.size() == 0) { if (enginePlugins.size() == 0) {
return Optional.empty(); return Optional.empty();
} else if (enginePlugins.size() == 1) { } else if (enginePlugins.size() == 1) {
@ -639,7 +623,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
deciders.addAll( deciders.addAll(
filterPlugins(ClusterPlugin.class).stream() filterPlugins(ClusterPlugin.class).stream()
.flatMap(p -> p.createAllocationDeciders(settings, clusterSettings).stream()) .flatMap(p -> p.createAllocationDeciders(settings, clusterSettings).stream())
.collect(Collectors.toList()) .toList()
); );
deciders.addAll(super.createAllocationDeciders(settings, clusterSettings)); deciders.addAll(super.createAllocationDeciders(settings, clusterSettings));
return deciders; return deciders;
@ -648,21 +632,21 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin
@Override @Override
public Map<String, ExistingShardsAllocator> getExistingShardsAllocators() { public Map<String, ExistingShardsAllocator> getExistingShardsAllocators() {
final Map<String, ExistingShardsAllocator> allocators = new HashMap<>(); final Map<String, ExistingShardsAllocator> allocators = new HashMap<>();
filterPlugins(ClusterPlugin.class).stream().forEach(p -> allocators.putAll(p.getExistingShardsAllocators())); filterPlugins(ClusterPlugin.class).forEach(p -> allocators.putAll(p.getExistingShardsAllocators()));
return allocators; return allocators;
} }
@Override @Override
public Map<String, IndexStorePlugin.DirectoryFactory> getDirectoryFactories() { public Map<String, IndexStorePlugin.DirectoryFactory> getDirectoryFactories() {
final Map<String, IndexStorePlugin.DirectoryFactory> factories = new HashMap<>(); final Map<String, IndexStorePlugin.DirectoryFactory> factories = new HashMap<>();
filterPlugins(IndexStorePlugin.class).stream().forEach(p -> factories.putAll(p.getDirectoryFactories())); filterPlugins(IndexStorePlugin.class).forEach(p -> factories.putAll(p.getDirectoryFactories()));
return factories; return factories;
} }
@Override @Override
public Map<String, RecoveryStateFactory> getRecoveryStateFactories() { public Map<String, RecoveryStateFactory> getRecoveryStateFactories() {
final Map<String, RecoveryStateFactory> factories = new HashMap<>(); final Map<String, RecoveryStateFactory> factories = new HashMap<>();
filterPlugins(IndexStorePlugin.class).stream().forEach(p -> factories.putAll(p.getRecoveryStateFactories())); filterPlugins(IndexStorePlugin.class).forEach(p -> factories.putAll(p.getRecoveryStateFactories()));
return factories; return factories;
} }
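
Note: two simplifications recur throughout this file. Seeding a collection via its copy constructor (`new ArrayList<>(source)`) replaces create-then-`addAll`, and calling `forEach` on the collection itself replaces `stream().forEach`. A sketch with hypothetical plugin lists:

    import java.util.ArrayList;
    import java.util.List;

    class SeedAndForEachDemo {
        public static void main(String[] args) {
            List<String> fromSuper = List.of("base-setting");
            List<List<String>> plugins = List.of(List.of("plugin-setting"));

            List<String> settings = new ArrayList<>(fromSuper); // copy constructor, no separate addAll
            plugins.forEach(settings::addAll);                  // forEach without a redundant stream()
            System.out.println(settings); // [base-setting, plugin-setting]
        }
    }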

View file

@ -134,10 +134,10 @@ public class DeprecationInfoAction extends ActionType<DeprecationInfoAction.Resp
Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>(); Map<DeprecationIssue, List<String>> issueToListOfNodesMap = new HashMap<>();
for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) { for (List<Tuple<DeprecationIssue, String>> similarIssues : issuesToMerge) {
DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings( DeprecationIssue leastCommonDenominator = DeprecationIssue.getIntersectionOfRemovableSettings(
similarIssues.stream().map(Tuple::v1).collect(Collectors.toList()) similarIssues.stream().map(Tuple::v1).toList()
); );
issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>()) issueToListOfNodesMap.computeIfAbsent(leastCommonDenominator, (key) -> new ArrayList<>())
.addAll(similarIssues.stream().map(Tuple::v2).collect(Collectors.toList())); .addAll(similarIssues.stream().map(Tuple::v2).toList());
} }
return issueToListOfNodesMap; return issueToListOfNodesMap;
} }

View file

@ -23,7 +23,6 @@ import java.util.Map;
import java.util.function.BiConsumer; import java.util.function.BiConsumer;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
/** /**
* Index-specific deprecation checks * Index-specific deprecation checks
@ -212,7 +211,7 @@ public class IndexDeprecationChecks {
); );
if (fields.size() > 0) { if (fields.size() > 0) {
String detailsMessageBeginning = fields.stream().collect(Collectors.joining(" ")); String detailsMessageBeginning = String.join(" ", fields);
return new DeprecationIssue( return new DeprecationIssue(
DeprecationIssue.Level.CRITICAL, DeprecationIssue.Level.CRITICAL,
"Date fields use deprecated camel case formats", "Date fields use deprecated camel case formats",

View file

@ -246,21 +246,19 @@ public class NodeDeprecationChecks {
Settings clusterSettings, Settings clusterSettings,
Settings nodeSettings Settings nodeSettings
) { ) {
List<Setting<?>> deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings) var deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings)
.sorted(Comparator.comparing(Setting::getKey)) .sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList()); .toList();
List<Setting<?>> deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings) var deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings)
.sorted(Comparator.comparing(Setting::getKey)) .sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList()); .toList();
if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) { if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) {
return null; return null;
} }
List<String> deprecatedNodeSettingKeys = deprecatedConcreteNodeSettings.stream().map(Setting::getKey).collect(Collectors.toList()); List<String> deprecatedNodeSettingKeys = deprecatedConcreteNodeSettings.stream().map(Setting::getKey).toList();
List<String> deprecatedClusterSettingKeys = deprecatedConcreteClusterSettings.stream() List<String> deprecatedClusterSettingKeys = deprecatedConcreteClusterSettings.stream().map(Setting::getKey).toList();
.map(Setting::getKey)
.collect(Collectors.toList());
final String concatSettingNames = Stream.concat(deprecatedNodeSettingKeys.stream(), deprecatedClusterSettingKeys.stream()) final String concatSettingNames = Stream.concat(deprecatedNodeSettingKeys.stream(), deprecatedClusterSettingKeys.stream())
.distinct() .distinct()
@ -289,10 +287,10 @@ public class NodeDeprecationChecks {
) { ) {
List<Setting<Settings>> deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings) List<Setting<Settings>> deprecatedConcreteNodeSettings = deprecatedAffixSetting.getAllConcreteSettings(nodeSettings)
.sorted(Comparator.comparing(Setting::getKey)) .sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList()); .toList();
List<Setting<Settings>> deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings) List<Setting<Settings>> deprecatedConcreteClusterSettings = deprecatedAffixSetting.getAllConcreteSettings(clusterSettings)
.sorted(Comparator.comparing(Setting::getKey)) .sorted(Comparator.comparing(Setting::getKey))
.collect(Collectors.toList()); .toList();
if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) { if (deprecatedConcreteNodeSettings.isEmpty() && deprecatedConcreteClusterSettings.isEmpty()) {
return null; return null;
@ -305,19 +303,19 @@ public class NodeDeprecationChecks {
.map(key -> key + "*") .map(key -> key + "*")
.collect(Collectors.joining(",")); .collect(Collectors.joining(","));
// The actual group setting that are present in the settings objects, with full setting name prepended. // The actual group setting that are present in the settings objects, with full setting name prepended.
List<String> allNodeSubSettingKeys = deprecatedConcreteNodeSettings.stream().map(affixSetting -> { List<String> allNodeSubSettingKeys = deprecatedConcreteNodeSettings.stream().flatMap(affixSetting -> {
String groupPrefix = affixSetting.getKey(); String groupPrefix = affixSetting.getKey();
Settings groupSettings = affixSetting.get(nodeSettings); Settings groupSettings = affixSetting.get(nodeSettings);
Set<String> subSettings = groupSettings.keySet(); Set<String> subSettings = groupSettings.keySet();
return subSettings.stream().map(key -> groupPrefix + key).collect(Collectors.toList()); return subSettings.stream().map(key -> groupPrefix + key);
}).flatMap(List::stream).sorted().collect(Collectors.toList()); }).sorted().toList();
List<String> allClusterSubSettingKeys = deprecatedConcreteClusterSettings.stream().map(affixSetting -> { List<String> allClusterSubSettingKeys = deprecatedConcreteClusterSettings.stream().flatMap(affixSetting -> {
String groupPrefix = affixSetting.getKey(); String groupPrefix = affixSetting.getKey();
Settings groupSettings = affixSetting.get(clusterSettings); Settings groupSettings = affixSetting.get(clusterSettings);
Set<String> subSettings = groupSettings.keySet(); Set<String> subSettings = groupSettings.keySet();
return subSettings.stream().map(key -> groupPrefix + key).collect(Collectors.toList()); return subSettings.stream().map(key -> groupPrefix + key);
}).flatMap(List::stream).sorted().collect(Collectors.toList()); }).sorted().toList();
final String allSubSettings = Stream.concat(allNodeSubSettingKeys.stream(), allClusterSubSettingKeys.stream()) final String allSubSettings = Stream.concat(allNodeSubSettingKeys.stream(), allClusterSubSettingKeys.stream())
.distinct() .distinct()
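
Note: returning a `Stream` from the mapping function and using `flatMap` directly removes the per-group `List` that the old `map(...).flatMap(List::stream)` chain created only to flatten again. A sketch with a hypothetical group record:

    import java.util.List;

    class FlatMapDemo {
        record Group(String prefix, List<String> subKeys) {}

        public static void main(String[] args) {
            List<Group> groups = List.of(new Group("a.", List.of("x", "y")), new Group("b.", List.of("z")));
            List<String> keys = groups.stream()
                .flatMap(g -> g.subKeys().stream().map(k -> g.prefix() + k)) // no per-group List
                .sorted()
                .toList();
            System.out.println(keys); // [a.x, a.y, b.z]
        }
    }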

View file

@ -151,9 +151,7 @@ public class TransportDeprecationInfoAction extends TransportMasterNodeReadActio
DeprecationChecker.Components components, DeprecationChecker.Components components,
ActionListener<Map<String, List<DeprecationIssue>>> listener ActionListener<Map<String, List<DeprecationIssue>>> listener
) { ) {
List<DeprecationChecker> enabledCheckers = checkers.stream() List<DeprecationChecker> enabledCheckers = checkers.stream().filter(c -> c.enabled(components.settings())).toList();
.filter(c -> c.enabled(components.settings()))
.collect(Collectors.toList());
if (enabledCheckers.isEmpty()) { if (enabledCheckers.isEmpty()) {
listener.onResponse(Collections.emptyMap()); listener.onResponse(Collections.emptyMap());
return; return;

View file

@ -441,7 +441,7 @@ public class EqlSearchIT extends ESRestTestCase {
} }
List<Object> actualList = new ArrayList<>(); List<Object> actualList = new ArrayList<>();
events.stream().forEach(m -> actualList.add(m.get("_id"))); events.forEach(m -> actualList.add(m.get("_id")));
if (false == expected.equals(actualList)) { if (false == expected.equals(actualList)) {
NotEqualMessageBuilder message = new NotEqualMessageBuilder(); NotEqualMessageBuilder message = new NotEqualMessageBuilder();

View file

@ -21,8 +21,6 @@ import java.util.LinkedHashSet;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
import static java.util.stream.Collectors.toList;
public final class AnalysisUtils { public final class AnalysisUtils {
private AnalysisUtils() {} private AnalysisUtils() {}
@ -67,7 +65,7 @@ public final class AnalysisUtils {
"Reference [" "Reference ["
+ u.qualifiedName() + u.qualifiedName()
+ "] is ambiguous (to disambiguate use quotes or qualifiers); matches any of " + "] is ambiguous (to disambiguate use quotes or qualifiers); matches any of "
+ matches.stream().map(a -> "\"" + a.qualifier() + "\".\"" + a.name() + "\"").sorted().collect(toList()) + matches.stream().map(a -> "\"" + a.qualifier() + "\".\"" + a.name() + "\"").sorted().toList()
); );
} }

View file

@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.IndexScopedSettings;
@ -66,7 +65,6 @@ import java.util.EnumSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.ClientHelper.FLEET_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.FLEET_ORIGIN;
@ -306,10 +304,7 @@ public class Fleet extends Plugin implements SystemIndexPlugin {
if (dataStreamDescriptors.isEmpty() == false) { if (dataStreamDescriptors.isEmpty() == false) {
try { try {
Request request = new Request( Request request = new Request(
dataStreamDescriptors.stream() dataStreamDescriptors.stream().map(SystemDataStreamDescriptor::getDataStreamName).toArray(String[]::new)
.map(SystemDataStreamDescriptor::getDataStreamName)
.collect(Collectors.toList())
.toArray(Strings.EMPTY_ARRAY)
); );
EnumSet<Option> options = request.indicesOptions().options(); EnumSet<Option> options = request.indicesOptions().options();
options.add(Option.IGNORE_UNAVAILABLE); options.add(Option.IGNORE_UNAVAILABLE);

View file

@ -85,7 +85,7 @@ public class TransportGetLifecycleAction extends TransportMasterNodeAction<Reque
names = Arrays.asList(request.getPolicyNames()); names = Arrays.asList(request.getPolicyNames());
} }
if (names.size() > 1 && names.stream().filter(Regex::isSimpleMatchPattern).count() > 0) { if (names.size() > 1 && names.stream().anyMatch(Regex::isSimpleMatchPattern)) {
throw new IllegalArgumentException( throw new IllegalArgumentException(
"wildcard only supports a single value, please use comma-separated values or a single wildcard value" "wildcard only supports a single value, please use comma-separated values or a single wildcard value"
); );
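
Note: `anyMatch` short-circuits at the first hit, whereas `filter(...).count() > 0` always walks the entire stream just to compare a full count against zero. A sketch with illustrative names and a plain `contains("*")` standing in for `Regex::isSimpleMatchPattern`:

    import java.util.List;

    class AnyMatchDemo {
        public static void main(String[] args) {
            List<String> names = List.of("policy-*", "policy-exact");
            boolean hasWildcard = names.stream().anyMatch(n -> n.contains("*")); // stops at the first match
            System.out.println(hasWildcard); // true
        }
    }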

View file

@ -120,8 +120,8 @@ public class TransportGetPipelineAction extends HandledTransportAction<GetPipeli
new GetPipelineResponse( new GetPipelineResponse(
Arrays.stream(mGetResponse.getResponses()) Arrays.stream(mGetResponse.getResponses())
.filter(itemResponse -> itemResponse.isFailed() == false) .filter(itemResponse -> itemResponse.isFailed() == false)
.filter(itemResponse -> itemResponse.getResponse().isExists())
.map(MultiGetItemResponse::getResponse) .map(MultiGetItemResponse::getResponse)
.filter(GetResponse::isExists)
.collect(Collectors.toMap(GetResponse::getId, GetResponse::getSourceAsBytesRef)) .collect(Collectors.toMap(GetResponse::getId, GetResponse::getSourceAsBytesRef))
) )
); );

View file

@@ -32,7 +32,6 @@ import org.elasticsearch.gateway.GatewayService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.ml.annotations.AnnotationIndex;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;
@@ -230,7 +229,7 @@ public class MlInitializationService implements ClusterStateListener {
             updateSettingsListener.onResponse(AcknowledgedResponse.TRUE);
             return;
         }
-        String nonHiddenIndicesString = Arrays.stream(nonHiddenIndices).collect(Collectors.joining(", "));
+        String nonHiddenIndicesString = String.join(", ", nonHiddenIndices);
         logger.debug("The following ML internal indices will now be made hidden: [{}]", nonHiddenIndicesString);
         UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest().indices(nonHiddenIndices)
             .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
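Note: for plain delimiter-joining, `String.join` (which accepts both arrays and `Iterable`s) avoids the stream pipeline entirely; the `Collectors.joining` form is only needed when elements must be mapped first or a prefix/suffix is wanted. A quick sketch:

```java
import java.util.List;
import java.util.stream.Collectors;

public class JoinDemo {
    public static void main(String[] args) {
        List<String> indices = List.of(".ml-state", ".ml-anomalies");
        // Equivalent output; String.join skips the stream machinery.
        System.out.println(String.join(", ", indices));
        System.out.println(indices.stream().collect(Collectors.joining(", ")));
        // The collector still earns its keep when a prefix/suffix is needed:
        System.out.println(indices.stream().collect(Collectors.joining(", ", "[ ", " ]")));
    }
}
```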

View file

@@ -260,7 +260,7 @@ public class TransportGetTrainedModelsStatsAction extends HandledTransportAction
                 matchedDeployments.add(assignment.getDeploymentId());
             }
         }
-        String deployments = matchedDeployments.stream().collect(Collectors.joining(","));
+        String deployments = String.join(",", matchedDeployments);
         logger.debug("Fetching stats for deployments [{}]", deployments);

View file

@@ -53,7 +53,6 @@ import org.elasticsearch.xpack.ml.process.MlMemoryTracker;
 import java.util.Optional;
 import java.util.Set;
 import java.util.function.Predicate;
-import java.util.stream.Collectors;
 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
@@ -187,7 +186,7 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
             referencedRuleFiltersPresentListener.onResponse(true);
         } else {
             GetFiltersAction.Request getFiltersRequest = new GetFiltersAction.Request();
-            getFiltersRequest.setResourceId(referencedRuleFilters.stream().collect(Collectors.joining(",")));
+            getFiltersRequest.setResourceId(String.join(",", referencedRuleFilters));
             getFiltersRequest.setAllowNoResources(false);
             client.execute(
                 GetFiltersAction.INSTANCE,

View file

@@ -291,7 +291,7 @@ public class TransportSetUpgradeModeAction extends AcknowledgedTransportMasterNo
             // We want to always have the same ordering of which tasks we un-allocate first.
             // However, the order in which the distributed tasks handle the un-allocation event is not guaranteed.
             .sorted(Comparator.comparing(PersistentTask::getTaskName))
-            .collect(Collectors.toList());
+            .toList();
         logger.info(
             "Un-assigning persistent tasks : " + mlTasks.stream().map(PersistentTask::getId).collect(Collectors.joining(", ", "[ ", " ]"))

View file

@@ -47,7 +47,6 @@ import java.util.Optional;
 import java.util.PriorityQueue;
 import java.util.function.Consumer;
 import java.util.function.Function;
-import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import static java.time.Instant.ofEpochMilli;
@@ -454,12 +453,7 @@ class MlMemoryAutoscalingDecider {
         if (unassignedJobs.isEmpty()) {
             return Optional.empty();
         }
-        List<Long> jobSizes = unassignedJobs.stream()
-            .map(sizeFunction)
-            .map(l -> l == null ? 0L : l)
-            .sorted(Comparator.comparingLong(Long::longValue).reversed())
-            .collect(Collectors.toList());
+        List<Long> jobSizes = computeJobSizes(unassignedJobs, sizeFunction);
         long tierMemory = 0L;
         // Node memory needs to be AT LEAST the size of the largest job + the required overhead.
         long nodeMemory = jobSizes.get(0);
@@ -720,11 +714,7 @@ class MlMemoryAutoscalingDecider {
         for (NodeLoad load : nodeLoads) {
             mostFreeMemoryFirst.add(NodeLoad.builder(load));
         }
-        List<Long> jobSizes = unassignedJobs.stream()
-            .map(sizeFunction)
-            .map(l -> l == null ? 0L : l)
-            .sorted(Comparator.comparingLong(Long::longValue).reversed())
-            .collect(Collectors.toList());
+        List<Long> jobSizes = computeJobSizes(unassignedJobs, sizeFunction);
         Iterator<Long> assignmentIter = jobSizes.iterator();
         while (jobSizes.size() > maxNumInQueue && assignmentIter.hasNext()) {
@@ -945,4 +935,13 @@ class MlMemoryAutoscalingDecider {
     private Long getAnomalyMemoryRequirement(PersistentTasksCustomMetadata.PersistentTask<?> task) {
         return getAnomalyMemoryRequirement(MlTasks.jobId(task.getId()));
     }
+
+    private static List<Long> computeJobSizes(List<String> unassignedJobs, Function<String, Long> sizeFunction) {
+        List<Long> jobSizes = new ArrayList<>(unassignedJobs.size());
+        for (String unassignedJob : unassignedJobs) {
+            jobSizes.add(Objects.requireNonNullElse(sizeFunction.apply(unassignedJob), 0L));
+        }
+        jobSizes.sort(Comparator.comparingLong(Long::longValue).reversed());
+        return jobSizes;
+    }
 }
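Note: the extracted helper replaces the two `map` stages with `Objects.requireNonNullElse`, which substitutes a default when a lookup returns null. A standalone sketch of the helper's shape (the size function here is hypothetical, not ML code; `Comparator.reverseOrder()` is equivalent to `comparingLong(Long::longValue).reversed()` for `Long`s):

```java
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;

public class JobSizesDemo {
    static List<Long> sizesDescending(List<String> jobs, Function<String, Long> sizeOf) {
        List<Long> sizes = new ArrayList<>(jobs.size());
        for (String job : jobs) {
            // null lookup results default to 0L
            sizes.add(Objects.requireNonNullElse(sizeOf.apply(job), 0L));
        }
        sizes.sort(Comparator.reverseOrder());
        return sizes;
    }

    public static void main(String[] args) {
        Map<String, Long> known = Map.of("a", 10L, "b", 30L);
        System.out.println(sizesDescending(List.of("a", "b", "c"), known::get)); // [30, 10, 0]
    }
}
```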

View file

@@ -25,7 +25,6 @@ import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import java.util.stream.Collectors;
 import static java.util.stream.Collectors.toList;
 import static org.elasticsearch.core.Strings.format;
@@ -83,7 +82,7 @@ public class DatafeedConfigAutoUpdater implements MlAutoUpdateService.UpdateActi
                 .setId(datafeedConfig.getId())
                 .build()
             )
-            .collect(Collectors.toList());
+            .toList();
         if (updates.isEmpty()) {
             return;
         }

View file

@@ -29,7 +29,6 @@ import java.util.List;
 import java.util.NoSuchElementException;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.stream.Collectors;
 /**
  * Abstract class for aggregated data extractors, e.g. {@link RollupDataExtractor}
@@ -168,8 +167,7 @@ abstract class AbstractAggregationDataExtractor<T extends ActionRequestBuilder<S
         }
         if (aggsAsList.size() > 1) {
             throw new IllegalArgumentException(
-                "Multiple top level aggregations not supported; found: "
-                    + aggsAsList.stream().map(Aggregation::getName).collect(Collectors.toList())
+                "Multiple top level aggregations not supported; found: " + aggsAsList.stream().map(Aggregation::getName).toList()
             );
         }

View file

@@ -146,7 +146,7 @@ public class RollupDataExtractorFactory implements DataExtractorFactory {
         List<ParsedRollupCaps> validIntervalCaps = rollupCapsSet.stream()
             .filter(rollupCaps -> validInterval(datafeedInterval, rollupCaps))
-            .collect(Collectors.toList());
+            .toList();
         if (validIntervalCaps.isEmpty()) {
             listener.onFailure(

View file

@@ -67,7 +67,7 @@ public class TimeBasedExtractedFields extends ExtractedFields {
             throw new IllegalArgumentException("cannot retrieve time field [" + timeField + "] because it is not aggregatable");
         }
         ExtractedField timeExtractedField = extractedTimeField(timeField, scriptFields);
-        List<String> remainingFields = job.allInputFields().stream().filter(f -> f.equals(timeField) == false).collect(Collectors.toList());
+        List<String> remainingFields = job.allInputFields().stream().filter(f -> f.equals(timeField) == false).toList();
         List<ExtractedField> allExtractedFields = new ArrayList<>(remainingFields.size() + 1);
         allExtractedFields.add(timeExtractedField);
         remainingFields.forEach(field -> allExtractedFields.add(extractionMethodDetector.detect(field)));

View file

@@ -65,7 +65,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;
 import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN;
 import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
@@ -346,7 +345,7 @@ public class DataFrameAnalyticsConfigProvider {
         }
         Set<String> tasksWithoutConfigs = new HashSet<>(jobsWithTask);
-        tasksWithoutConfigs.removeAll(configs.stream().map(DataFrameAnalyticsConfig::getId).collect(Collectors.toList()));
+        configs.stream().map(DataFrameAnalyticsConfig::getId).toList().forEach(tasksWithoutConfigs::remove);
         if (tasksWithoutConfigs.isEmpty() == false) {
             logger.warn("Data frame analytics tasks {} have no configs", tasksWithoutConfigs);
         }

View file

@@ -16,6 +16,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -215,7 +216,7 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
         }
         StringBuilder msg = new StringBuilder();
-        List<Node> nodes = nodeToModel.keySet().stream().sorted(Comparator.comparing(Node::id)).collect(Collectors.toList());
+        List<Node> nodes = nodeToModel.keySet().stream().sorted(Comparator.comparing(Node::id)).toList();
         for (int i = 0; i < nodes.size(); i++) {
             Node n = nodes.get(i);
             msg.append(n);
@@ -223,7 +224,7 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
             for (Tuple<Deployment, Integer> modelAllocations : nodeToModel.get(n)
                 .stream()
                 .sorted(Comparator.comparing(x -> x.v1().id()))
-                .collect(Collectors.toList())) {
+                .toList()) {
                 if (modelAllocations.v2() > 0) {
                     msg.append(" ");
                     msg.append(modelAllocations.v1().id());
@@ -259,10 +260,10 @@ public class AssignmentPlan implements Comparable<AssignmentPlan> {
         private final Map<Deployment, Integer> remainingModelAllocations;
         private Builder(Collection<Node> nodes, Collection<Deployment> deployments) {
-            if (nodes.stream().collect(Collectors.toSet()).size() != nodes.size()) {
+            if (new HashSet<>(nodes).size() != nodes.size()) {
                 throw new IllegalArgumentException("there should be no duplicate nodes");
             }
-            if (deployments.stream().collect(Collectors.toSet()).size() != deployments.size()) {
+            if (new HashSet<>(deployments).size() != deployments.size()) {
                 throw new IllegalArgumentException("there should be no duplicate models");
             }
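Note: copying straight into `new HashSet<>(collection)` is the idiomatic duplicate check; `stream().collect(Collectors.toSet())` builds the same set through extra indirection. Sketch:

```java
import java.util.HashSet;
import java.util.List;

public class DuplicateCheckDemo {
    public static void main(String[] args) {
        List<String> nodes = List.of("node-1", "node-2", "node-1");
        // A set drops duplicates, so a size mismatch means the input had them.
        if (new HashSet<>(nodes).size() != nodes.size()) {
            System.out.println("there should be no duplicate nodes");
        }
    }
}
```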

View file

@@ -46,9 +46,7 @@ public class ZoneAwareAssignmentPlanner {
     }
     private static Map<List<String>, List<Node>> sortByZone(Map<List<String>, List<Node>> nodesByZone) {
-        Map<List<String>, List<Node>> sortedByZone = new TreeMap<>(
-            Comparator.comparing(zoneAttributes -> zoneAttributes.stream().collect(Collectors.joining()))
-        );
+        Map<List<String>, List<Node>> sortedByZone = new TreeMap<>(Comparator.comparing(zoneAttributes -> String.join("", zoneAttributes)));
         sortedByZone.putAll(nodesByZone);
         return sortedByZone;
     }

View file

@@ -52,7 +52,7 @@ public class OverallBucketsAggregator implements OverallBucketsProcessor {
     private OverallBucket outputBucket() {
         List<OverallBucket.JobInfo> jobs = new ArrayList<>(maxScoreByJob.size());
-        maxScoreByJob.entrySet().stream().forEach(entry -> jobs.add(new OverallBucket.JobInfo(entry.getKey(), entry.getValue())));
+        maxScoreByJob.forEach((key, value) -> jobs.add(new OverallBucket.JobInfo(key, value)));
         return new OverallBucket(new Date(startTime), bucketSpanSeconds, maxOverallScore, jobs, isInterim);
     }
@@ -65,7 +65,7 @@ public class OverallBucketsAggregator implements OverallBucketsProcessor {
     private void processBucket(OverallBucket bucket) {
         maxOverallScore = Math.max(maxOverallScore, bucket.getOverallScore());
-        bucket.getJobs().stream().forEach(j -> {
+        bucket.getJobs().forEach(j -> {
             double currentMax = maxScoreByJob.computeIfAbsent(j.getJobId(), k -> 0.0);
             if (j.getMaxAnomalyScore() > currentMax) {
                 maxScoreByJob.put(j.getJobId(), j.getMaxAnomalyScore());
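Note: `Map.forEach` takes a `BiConsumer` over key and value directly, so there is no need to stream the entry set; likewise `Collection.forEach` replaces `stream().forEach` when no intermediate operations are involved. Sketch (the record is a stand-in for `OverallBucket.JobInfo`):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class MapForEachDemo {
    record JobInfo(String jobId, double maxScore) {}

    public static void main(String[] args) {
        Map<String, Double> maxScoreByJob = Map.of("job-a", 0.9, "job-b", 0.4);
        List<JobInfo> jobs = new ArrayList<>(maxScoreByJob.size());
        // One BiConsumer instead of entrySet().stream().forEach(entry -> ...)
        maxScoreByJob.forEach((jobId, score) -> jobs.add(new JobInfo(jobId, score)));
        System.out.println(jobs.size()); // 2
    }
}
```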

View file

@@ -14,7 +14,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.Collectors;
 import static org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable.ChildType.BUCKET_INFLUENCER;
@@ -109,13 +108,10 @@ public class BucketNormalizable extends Normalizable {
     @Override
     public List<Normalizable> getChildren(ChildType type) {
-        List<Normalizable> children = new ArrayList<>();
+        List<Normalizable> children;
         switch (type) {
-            case BUCKET_INFLUENCER -> children.addAll(
-                bucket.getBucketInfluencers()
-                    .stream()
-                    .map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex()))
-                    .collect(Collectors.toList())
+            case BUCKET_INFLUENCER -> children = new ArrayList<>(
+                bucket.getBucketInfluencers().stream().map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex())).toList()
             );
             default -> throw new IllegalArgumentException("Invalid type: " + type);
         }

View file

@@ -108,11 +108,7 @@ public class InternalItemSetMapReduceAggregationTests extends InternalAggregatio
         @Override
         public WordCounts map(Stream<Tuple<Field, List<Object>>> keyValues, WordCounts wordCounts) {
-            keyValues.forEach(v -> {
-                v.v2().stream().forEach(word -> { wordCounts.frequencies.compute((String) word, (k, c) -> (c == null) ? 1 : c + 1); });
-            });
+            keyValues.forEach(v -> v.v2().forEach(word -> wordCounts.frequencies.merge((String) word, 1L, Long::sum)));
             return wordCounts;
         }
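Note: `Map.merge(key, 1L, Long::sum)` expresses insert-or-increment in one call; the replaced `compute` had to spell the null check out by hand. A counting sketch:

```java
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MergeDemo {
    public static void main(String[] args) {
        Map<String, Long> frequencies = new HashMap<>();
        for (String word : List.of("a", "b", "a")) {
            // inserts 1L on first sight, otherwise adds 1L to the current count
            frequencies.merge(word, 1L, Long::sum);
        }
        System.out.println(frequencies); // {a=2, b=1}
    }
}
```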

View file

@@ -704,7 +704,7 @@ public class IndexResolver {
         for (String concreteIndex : concreteIndices) {
             if (aliases.containsKey(concreteIndex)) {
                 List<AliasMetadata> concreteIndexAliases = aliases.get(concreteIndex);
-                concreteIndexAliases.stream().forEach(e -> uniqueAliases.add(e.alias()));
+                concreteIndexAliases.forEach(e -> uniqueAliases.add(e.alias()));
             }
         }
         concreteIndices.addAll(uniqueAliases);

View file

@@ -8,12 +8,12 @@ package org.elasticsearch.xpack.ql.type;
 import java.math.BigInteger;
 import java.time.ZonedDateTime;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Locale;
 import java.util.Map;
+import java.util.stream.Stream;
 import static java.util.stream.Collectors.toMap;
 import static java.util.stream.Collectors.toUnmodifiableMap;
@@ -53,7 +53,7 @@ public final class DataTypes {
     public static final DataType NESTED = new DataType("nested", 0, false, false, false);
     //end::noformat
-    private static final Collection<DataType> TYPES = Arrays.asList(
+    private static final Collection<DataType> TYPES = Stream.of(
         UNSUPPORTED,
         NULL,
         BOOLEAN,
@@ -74,7 +74,7 @@ public final class DataTypes {
         BINARY,
         OBJECT,
         NESTED
-    ).stream().sorted(Comparator.comparing(DataType::typeName)).toList();
+    ).sorted(Comparator.comparing(DataType::typeName)).toList();
     private static final Map<String, DataType> NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t));
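Note: starting from `Stream.of(...)` avoids materializing a throwaway `List` just to call `.stream()` on it. Sketch:

```java
import java.util.Comparator;
import java.util.List;
import java.util.stream.Stream;

public class StreamOfDemo {
    public static void main(String[] args) {
        // Builds the stream directly instead of Arrays.asList(...).stream()
        List<String> types = Stream.of("keyword", "boolean", "long")
            .sorted(Comparator.naturalOrder())
            .toList();
        System.out.println(types); // [boolean, keyword, long]
    }
}
```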

View file

@@ -209,7 +209,7 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
         Set<RollupJobCaps> validatedCaps = new HashSet<>();
         sourceAgg.getAggregatorFactories()
             .forEach(agg -> validatedCaps.addAll(RollupJobIdentifierUtils.findBestJobs(agg, context.getJobCaps())));
-        List<String> jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).collect(Collectors.toList());
+        List<String> jobIds = validatedCaps.stream().map(RollupJobCaps::getJobID).toList();
         for (AggregationBuilder agg : sourceAgg.getAggregatorFactories()) {

View file

@@ -39,7 +39,6 @@ import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.Collectors;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -343,7 +342,7 @@ public class PinnedQueryBuilder extends AbstractQueryBuilder<PinnedQueryBuilder>
         if (idField == null) {
             return new MatchNoDocsQuery("No mappings");
         }
-        List<Item> items = (docs != null) ? docs : ids.stream().map(id -> new Item(id)).collect(Collectors.toList());
+        List<Item> items = (docs != null) ? docs : ids.stream().map(id -> new Item(id)).toList();
         if (items.isEmpty()) {
             return new CappedScoreQuery(organicQuery.toQuery(context), MAX_ORGANIC_SCORE);
         } else {

View file

@@ -94,7 +94,7 @@ public class SearchableSnapshotsStatsResponse extends BroadcastResponse {
             .map(ShardRouting::index)
             .sorted(Index.COMPARE_BY_NAME)
             .distinct()
-            .collect(toList());
+            .toList();
         for (Index index : indices) {
             builder.startObject(index.getName());
@@ -117,7 +117,7 @@ public class SearchableSnapshotsStatsResponse extends BroadcastResponse {
         List<SearchableSnapshotShardStats> listOfStats = getStats().stream()
             .filter(dirStats -> dirStats.getShardRouting().index().equals(index))
             .sorted(Comparator.comparingInt(dir -> dir.getShardRouting().getId()))
-            .collect(Collectors.toList());
+            .toList();
         int minShard = listOfStats.stream()
             .map(stat -> stat.getShardRouting().getId())
Some files were not shown because too many files have changed in this diff.