diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/HdfsClassPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/HdfsClassPatcher.java new file mode 100644 index 000000000000..079ecab783ac --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/HdfsClassPatcher.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.internal.dependencies.patches.hdfs; + +import org.gradle.api.artifacts.transform.CacheableTransform; +import org.gradle.api.artifacts.transform.InputArtifact; +import org.gradle.api.artifacts.transform.TransformAction; +import org.gradle.api.artifacts.transform.TransformOutputs; +import org.gradle.api.artifacts.transform.TransformParameters; +import org.gradle.api.file.FileSystemLocation; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.Classpath; +import org.gradle.api.tasks.Input; +import org.gradle.api.tasks.Optional; +import org.jetbrains.annotations.NotNull; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.ClassWriter; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Function; +import java.util.jar.JarEntry; 
+import java.util.jar.JarFile; +import java.util.jar.JarOutputStream; +import java.util.regex.Pattern; + +import static java.util.Map.entry; + +@CacheableTransform +public abstract class HdfsClassPatcher implements TransformAction { + + record JarPatchers(String artifactTag, Pattern artifactPattern, Map> jarPatchers) {} + + static final List allPatchers = List.of( + new JarPatchers( + "hadoop-common", + Pattern.compile("hadoop-common-(?!.*tests)"), + Map.ofEntries( + entry("org/apache/hadoop/util/ShutdownHookManager.class", ShutdownHookManagerPatcher::new), + entry("org/apache/hadoop/util/Shell.class", ShellPatcher::new), + entry("org/apache/hadoop/security/UserGroupInformation.class", SubjectGetSubjectPatcher::new) + ) + ), + new JarPatchers( + "hadoop-client-api", + Pattern.compile("hadoop-client-api.*"), + Map.ofEntries( + entry("org/apache/hadoop/util/ShutdownHookManager.class", ShutdownHookManagerPatcher::new), + entry("org/apache/hadoop/util/Shell.class", ShellPatcher::new), + entry("org/apache/hadoop/security/UserGroupInformation.class", SubjectGetSubjectPatcher::new), + entry("org/apache/hadoop/security/authentication/client/KerberosAuthenticator.class", SubjectGetSubjectPatcher::new) + ) + ) + ); + + interface Parameters extends TransformParameters { + @Input + @Optional + List getMatchingArtifacts(); + + void setMatchingArtifacts(List matchingArtifacts); + } + + @Classpath + @InputArtifact + public abstract Provider getInputArtifact(); + + @Override + public void transform(@NotNull TransformOutputs outputs) { + File inputFile = getInputArtifact().get().getAsFile(); + + List matchingArtifacts = getParameters().getMatchingArtifacts(); + List patchersToApply = allPatchers.stream() + .filter(jp -> matchingArtifacts.contains(jp.artifactTag()) && jp.artifactPattern().matcher(inputFile.getName()).find()) + .toList(); + if (patchersToApply.isEmpty()) { + outputs.file(getInputArtifact()); + } else { + patchersToApply.forEach(patchers -> { + 
System.out.println("Patching " + inputFile.getName()); + + Map> jarPatchers = new HashMap<>(patchers.jarPatchers()); + File outputFile = outputs.file(inputFile.getName().replace(".jar", "-patched.jar")); + + patchJar(inputFile, outputFile, jarPatchers); + + if (jarPatchers.isEmpty() == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "error patching [%s] with [%s]: the jar does not contain [%s]", + inputFile.getName(), + patchers.artifactPattern().toString(), + String.join(", ", jarPatchers.keySet()) + ) + ); + } + }); + } + } + + private static void patchJar(File inputFile, File outputFile, Map> jarPatchers) { + try (JarFile jarFile = new JarFile(inputFile); JarOutputStream jos = new JarOutputStream(new FileOutputStream(outputFile))) { + Enumeration entries = jarFile.entries(); + while (entries.hasMoreElements()) { + JarEntry entry = entries.nextElement(); + String entryName = entry.getName(); + // Add the entry to the new JAR file + jos.putNextEntry(new JarEntry(entryName)); + + Function classPatcher = jarPatchers.remove(entryName); + if (classPatcher != null) { + byte[] classToPatch = jarFile.getInputStream(entry).readAllBytes(); + + ClassReader classReader = new ClassReader(classToPatch); + ClassWriter classWriter = new ClassWriter(classReader, 0); + classReader.accept(classPatcher.apply(classWriter), 0); + + jos.write(classWriter.toByteArray()); + } else { + // Read the entry's data and write it to the new JAR + try (InputStream is = jarFile.getInputStream(entry)) { + is.transferTo(jos); + } + } + jos.closeEntry(); + } + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } +} diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/MethodReplacement.java similarity index 94% rename from 
plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/MethodReplacement.java index e07a32cc294a..7bc6a6c0d530 100644 --- a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/MethodReplacement.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/MethodReplacement.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.hdfs.patch; +package org.elasticsearch.gradle.internal.dependencies.patches.hdfs; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShellPatcher.java similarity index 94% rename from plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShellPatcher.java index 397b63e434ba..ab63249f5c8e 100644 --- a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShellPatcher.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShellPatcher.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.hdfs.patch; +package org.elasticsearch.gradle.internal.dependencies.patches.hdfs; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShutdownHookManagerPatcher.java similarity index 97% rename from plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShutdownHookManagerPatcher.java index 1235b5af9002..4efe48a3bf72 100644 --- a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/ShutdownHookManagerPatcher.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/ShutdownHookManagerPatcher.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.hdfs.patch; +package org.elasticsearch.gradle.internal.dependencies.patches.hdfs; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/SubjectGetSubjectPatcher.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/SubjectGetSubjectPatcher.java similarity index 97% rename from plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/SubjectGetSubjectPatcher.java rename to build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/SubjectGetSubjectPatcher.java index 3fb8a23be794..3b241f7001fb 100644 --- a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/SubjectGetSubjectPatcher.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/dependencies/patches/hdfs/SubjectGetSubjectPatcher.java @@ -7,7 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.hdfs.patch; +package org.elasticsearch.gradle.internal.dependencies.patches.hdfs; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index aaa43900badb..97c70837c218 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -180,7 +180,7 @@ final class SystemJvmOptions { } // We instrument classes in these modules to call the bridge. 
Because the bridge gets patched // into java.base, we must export the bridge from java.base to these modules, as a comma-separated list - String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming"; + String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming,jdk.net"; return Stream.of( "-Des.entitlements.enabled=true", "-XX:+EnableDynamicAgentLoading", diff --git a/docs/changelog/117642.yaml b/docs/changelog/117642.yaml new file mode 100644 index 000000000000..dbddbbf5e64e --- /dev/null +++ b/docs/changelog/117642.yaml @@ -0,0 +1,5 @@ +pr: 117642 +summary: Adding endpoint creation validation to `ElasticInferenceService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/122458.yaml b/docs/changelog/122458.yaml new file mode 100644 index 000000000000..e28e22eb363b --- /dev/null +++ b/docs/changelog/122458.yaml @@ -0,0 +1,5 @@ +pr: 122458 +summary: '`DesiredBalanceReconciler` always returns `AllocationStats`' +area: Allocation +type: bug +issues: [] diff --git a/docs/changelog/122951.yaml b/docs/changelog/122951.yaml new file mode 100644 index 000000000000..b84e05d758fe --- /dev/null +++ b/docs/changelog/122951.yaml @@ -0,0 +1,6 @@ +pr: 122951 +summary: Updates the deprecation info API to not warn about system indices and data + streams +area: Indices APIs +type: bug +issues: [] diff --git a/libs/entitlement/asm-provider/build.gradle b/libs/entitlement/asm-provider/build.gradle index c6b51b7da3df..7bfd3e1f1f55 100644 --- a/libs/entitlement/asm-provider/build.gradle +++ b/libs/entitlement/asm-provider/build.gradle @@ -12,6 +12,7 @@ apply plugin: 'elasticsearch.build' dependencies { compileOnly project(':libs:entitlement') compileOnly project(':libs:core') + compileOnly project(':libs:logging') implementation 'org.ow2.asm:asm:9.7.1' testImplementation project(":test:framework") testImplementation project(":libs:entitlement:bridge") diff --git 
a/libs/entitlement/asm-provider/src/main/java/module-info.java b/libs/entitlement/asm-provider/src/main/java/module-info.java index f953454f93b9..ddd8cdf08da5 100644 --- a/libs/entitlement/asm-provider/src/main/java/module-info.java +++ b/libs/entitlement/asm-provider/src/main/java/module-info.java @@ -15,6 +15,7 @@ module org.elasticsearch.entitlement.instrumentation { requires org.elasticsearch.entitlement; requires static org.elasticsearch.base; // for SuppressForbidden + requires org.elasticsearch.logging; provides InstrumentationService with InstrumentationServiceImpl; } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index b10c58afacb1..4d8f0bce655b 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -12,6 +12,8 @@ package org.elasticsearch.entitlement.instrumentation.impl; import org.elasticsearch.entitlement.instrumentation.CheckMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.objectweb.asm.AnnotationVisitor; import org.objectweb.asm.ClassReader; import org.objectweb.asm.ClassVisitor; @@ -36,6 +38,7 @@ import static org.objectweb.asm.Opcodes.INVOKESTATIC; import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL; public class InstrumenterImpl implements Instrumenter { + private static final Logger logger = LogManager.getLogger(InstrumenterImpl.class); private final String getCheckerClassMethodDescriptor; private final String handleClass; @@ -155,10 +158,10 @@ public class 
InstrumenterImpl implements Instrumenter { var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = checkMethods.get(key); if (instrumentationMethod != null) { - // System.out.println("Will instrument method " + key); + logger.debug("Will instrument {}", key); return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); } else { - // System.out.println("Will not instrument method " + key); + logger.trace("Will not instrument {}", key); } } return mv; diff --git a/libs/entitlement/bridge/src/main/java/module-info.java b/libs/entitlement/bridge/src/main/java/module-info.java index b9055ec5fbf6..518a0a1ef29e 100644 --- a/libs/entitlement/bridge/src/main/java/module-info.java +++ b/libs/entitlement/bridge/src/main/java/module-info.java @@ -11,6 +11,7 @@ // At build and run time, the bridge is patched into the java.base module. module org.elasticsearch.entitlement.bridge { requires java.net.http; + requires jdk.net; exports org.elasticsearch.entitlement.bridge; } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index f113960be927..f0bbcd9b7d09 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -9,11 +9,14 @@ package org.elasticsearch.entitlement.bridge; +import jdk.nio.Channels; + import java.io.File; import java.io.FileDescriptor; import java.io.FileFilter; import java.io.FilenameFilter; import java.io.InputStream; +import java.io.OutputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.foreign.AddressLayout; @@ -58,14 +61,22 @@ import java.nio.file.AccessMode; import 
java.nio.file.CopyOption; import java.nio.file.DirectoryStream; import java.nio.file.FileStore; +import java.nio.file.FileVisitOption; +import java.nio.file.FileVisitor; import java.nio.file.LinkOption; import java.nio.file.OpenOption; import java.nio.file.Path; import java.nio.file.WatchEvent; import java.nio.file.WatchService; +import java.nio.file.attribute.BasicFileAttributes; import java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.FileAttributeView; +import java.nio.file.attribute.FileTime; +import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.UserPrincipal; import java.nio.file.spi.FileSystemProvider; +import java.security.KeyStore; +import java.security.Provider; import java.security.cert.CertStoreParameters; import java.util.List; import java.util.Locale; @@ -75,6 +86,7 @@ import java.util.Set; import java.util.TimeZone; import java.util.concurrent.ExecutorService; import java.util.concurrent.ForkJoinPool; +import java.util.function.BiPredicate; import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; @@ -621,19 +633,255 @@ public interface EntitlementChecker { void check$java_io_RandomAccessFile$(Class callerClass, File file, String mode); + void check$java_security_KeyStore$$getInstance(Class callerClass, File file, char[] password); + + void check$java_security_KeyStore$$getInstance(Class callerClass, File file, KeyStore.LoadStoreParameter param); + + void check$java_security_KeyStore$Builder$$newInstance(Class callerClass, File file, KeyStore.ProtectionParameter protection); + + void check$java_security_KeyStore$Builder$$newInstance( + Class callerClass, + String type, + Provider provider, + File file, + KeyStore.ProtectionParameter protection + ); + void check$java_util_Scanner$(Class callerClass, File source); void check$java_util_Scanner$(Class callerClass, File source, String charsetName); void check$java_util_Scanner$(Class callerClass, File source, Charset charset); + void 
check$java_util_jar_JarFile$(Class callerClass, String name); + + void check$java_util_jar_JarFile$(Class callerClass, String name, boolean verify); + + void check$java_util_jar_JarFile$(Class callerClass, File file); + + void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify); + + void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify, int mode); + + void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify, int mode, Runtime.Version version); + + void check$java_util_zip_ZipFile$(Class callerClass, String name); + + void check$java_util_zip_ZipFile$(Class callerClass, String name, Charset charset); + + void check$java_util_zip_ZipFile$(Class callerClass, File file); + + void check$java_util_zip_ZipFile$(Class callerClass, File file, int mode); + + void check$java_util_zip_ZipFile$(Class callerClass, File file, Charset charset); + + void check$java_util_zip_ZipFile$(Class callerClass, File file, int mode, Charset charset); + // nio + // channels + void check$java_nio_channels_FileChannel$(Class callerClass); + + void check$java_nio_channels_FileChannel$$open( + Class callerClass, + Path path, + Set options, + FileAttribute... attrs + ); + + void check$java_nio_channels_FileChannel$$open(Class callerClass, Path path, OpenOption... options); + + void check$java_nio_channels_AsynchronousFileChannel$(Class callerClass); + + void check$java_nio_channels_AsynchronousFileChannel$$open( + Class callerClass, + Path path, + Set options, + ExecutorService executor, + FileAttribute... attrs + ); + + void check$java_nio_channels_AsynchronousFileChannel$$open(Class callerClass, Path path, OpenOption... options); + + void check$jdk_nio_Channels$$readWriteSelectableChannel( + Class callerClass, + FileDescriptor fd, + Channels.SelectableChannelCloser closer + ); + + // files void check$java_nio_file_Files$$getOwner(Class callerClass, Path path, LinkOption... 
options); void check$java_nio_file_Files$$probeContentType(Class callerClass, Path path); void check$java_nio_file_Files$$setOwner(Class callerClass, Path path, UserPrincipal principal); + void check$java_nio_file_Files$$newInputStream(Class callerClass, Path path, OpenOption... options); + + void check$java_nio_file_Files$$newOutputStream(Class callerClass, Path path, OpenOption... options); + + void check$java_nio_file_Files$$newByteChannel( + Class callerClass, + Path path, + Set options, + FileAttribute... attrs + ); + + void check$java_nio_file_Files$$newByteChannel(Class callerClass, Path path, OpenOption... options); + + void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir); + + void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir, String glob); + + void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir, DirectoryStream.Filter filter); + + void check$java_nio_file_Files$$createFile(Class callerClass, Path path, FileAttribute... attrs); + + void check$java_nio_file_Files$$createDirectory(Class callerClass, Path dir, FileAttribute... attrs); + + void check$java_nio_file_Files$$createDirectories(Class callerClass, Path dir, FileAttribute... attrs); + + void check$java_nio_file_Files$$createTempFile(Class callerClass, Path dir, String prefix, String suffix, FileAttribute... attrs); + + void check$java_nio_file_Files$$createTempFile(Class callerClass, String prefix, String suffix, FileAttribute... attrs); + + void check$java_nio_file_Files$$createTempDirectory(Class callerClass, Path dir, String prefix, FileAttribute... attrs); + + void check$java_nio_file_Files$$createTempDirectory(Class callerClass, String prefix, FileAttribute... attrs); + + void check$java_nio_file_Files$$createSymbolicLink(Class callerClass, Path link, Path target, FileAttribute... 
attrs); + + void check$java_nio_file_Files$$createLink(Class callerClass, Path link, Path existing); + + void check$java_nio_file_Files$$delete(Class callerClass, Path path); + + void check$java_nio_file_Files$$deleteIfExists(Class callerClass, Path path); + + void check$java_nio_file_Files$$copy(Class callerClass, Path source, Path target, CopyOption... options); + + void check$java_nio_file_Files$$move(Class callerClass, Path source, Path target, CopyOption... options); + + void check$java_nio_file_Files$$readSymbolicLink(Class callerClass, Path link); + + void check$java_nio_file_Files$$getFileStore(Class callerClass, Path path); + + void check$java_nio_file_Files$$isSameFile(Class callerClass, Path path, Path path2); + + void check$java_nio_file_Files$$mismatch(Class callerClass, Path path, Path path2); + + void check$java_nio_file_Files$$isHidden(Class callerClass, Path path); + + void check$java_nio_file_Files$$getFileAttributeView( + Class callerClass, + Path path, + Class type, + LinkOption... options + ); + + void check$java_nio_file_Files$$readAttributes( + Class callerClass, + Path path, + Class type, + LinkOption... options + ); + + void check$java_nio_file_Files$$setAttribute(Class callerClass, Path path, String attribute, Object value, LinkOption... options); + + void check$java_nio_file_Files$$getAttribute(Class callerClass, Path path, String attribute, LinkOption... options); + + void check$java_nio_file_Files$$readAttributes(Class callerClass, Path path, String attributes, LinkOption... options); + + void check$java_nio_file_Files$$getPosixFilePermissions(Class callerClass, Path path, LinkOption... options); + + void check$java_nio_file_Files$$setPosixFilePermissions(Class callerClass, Path path, Set perms); + + void check$java_nio_file_Files$$isSymbolicLink(Class callerClass, Path path); + + void check$java_nio_file_Files$$isDirectory(Class callerClass, Path path, LinkOption... 
options); + + void check$java_nio_file_Files$$isRegularFile(Class callerClass, Path path, LinkOption... options); + + void check$java_nio_file_Files$$getLastModifiedTime(Class callerClass, Path path, LinkOption... options); + + void check$java_nio_file_Files$$setLastModifiedTime(Class callerClass, Path path, FileTime time); + + void check$java_nio_file_Files$$size(Class callerClass, Path path); + + void check$java_nio_file_Files$$exists(Class callerClass, Path path, LinkOption... options); + + void check$java_nio_file_Files$$notExists(Class callerClass, Path path, LinkOption... options); + + void check$java_nio_file_Files$$isReadable(Class callerClass, Path path); + + void check$java_nio_file_Files$$isWritable(Class callerClass, Path path); + + void check$java_nio_file_Files$$isExecutable(Class callerClass, Path path); + + void check$java_nio_file_Files$$walkFileTree( + Class callerClass, + Path start, + Set options, + int maxDepth, + FileVisitor visitor + ); + + void check$java_nio_file_Files$$walkFileTree(Class callerClass, Path start, FileVisitor visitor); + + void check$java_nio_file_Files$$newBufferedReader(Class callerClass, Path path, Charset cs); + + void check$java_nio_file_Files$$newBufferedReader(Class callerClass, Path path); + + void check$java_nio_file_Files$$newBufferedWriter(Class callerClass, Path path, Charset cs, OpenOption... options); + + void check$java_nio_file_Files$$newBufferedWriter(Class callerClass, Path path, OpenOption... options); + + void check$java_nio_file_Files$$copy(Class callerClass, InputStream in, Path target, CopyOption... 
options); + + void check$java_nio_file_Files$$copy(Class callerClass, Path source, OutputStream out); + + void check$java_nio_file_Files$$readAllBytes(Class callerClass, Path path); + + void check$java_nio_file_Files$$readString(Class callerClass, Path path); + + void check$java_nio_file_Files$$readString(Class callerClass, Path path, Charset cs); + + void check$java_nio_file_Files$$readAllLines(Class callerClass, Path path, Charset cs); + + void check$java_nio_file_Files$$readAllLines(Class callerClass, Path path); + + void check$java_nio_file_Files$$write(Class callerClass, Path path, byte[] bytes, OpenOption... options); + + void check$java_nio_file_Files$$write( + Class callerClass, + Path path, + Iterable lines, + Charset cs, + OpenOption... options + ); + + void check$java_nio_file_Files$$write(Class callerClass, Path path, Iterable lines, OpenOption... options); + + void check$java_nio_file_Files$$writeString(Class callerClass, Path path, CharSequence csq, OpenOption... options); + + void check$java_nio_file_Files$$writeString(Class callerClass, Path path, CharSequence csq, Charset cs, OpenOption... options); + + void check$java_nio_file_Files$$list(Class callerClass, Path dir); + + void check$java_nio_file_Files$$walk(Class callerClass, Path start, int maxDepth, FileVisitOption... options); + + void check$java_nio_file_Files$$walk(Class callerClass, Path start, FileVisitOption... options); + + void check$java_nio_file_Files$$find( + Class callerClass, + Path start, + int maxDepth, + BiPredicate matcher, + FileVisitOption... 
options + ); + + void check$java_nio_file_Files$$lines(Class callerClass, Path path, Charset cs); + + void check$java_nio_file_Files$$lines(Class callerClass, Path path); + // file system providers void check$java_nio_file_spi_FileSystemProvider$(Class callerClass); diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java index bb4c6fd75942..ee2ae33d3489 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/module-info.java @@ -16,4 +16,5 @@ module org.elasticsearch.entitlement.qa.test { // Modules we'll attempt to use in order to exercise entitlements requires java.logging; requires java.net.http; + requires jdk.net; } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java index ca0301463407..929ec4ce731d 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/DummyImplementations.java @@ -9,6 +9,10 @@ package org.elasticsearch.entitlement.qa.test; +import jdk.nio.Channels; + +import org.elasticsearch.core.SuppressForbidden; + import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -24,14 +28,23 @@ import java.net.SocketAddress; import java.net.SocketException; import java.net.SocketImpl; import java.net.URI; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; import java.nio.channels.AsynchronousChannelGroup; +import java.nio.channels.AsynchronousFileChannel; import java.nio.channels.AsynchronousServerSocketChannel; import 
java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; import java.nio.channels.DatagramChannel; +import java.nio.channels.FileChannel; +import java.nio.channels.FileLock; import java.nio.channels.Pipe; +import java.nio.channels.ReadableByteChannel; import java.nio.channels.SeekableByteChannel; +import java.nio.channels.SelectableChannel; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; +import java.nio.channels.WritableByteChannel; import java.nio.channels.spi.AbstractSelector; import java.nio.channels.spi.AsynchronousChannelProvider; import java.nio.channels.spi.SelectorProvider; @@ -67,6 +80,7 @@ import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; import java.util.concurrent.ThreadFactory; import java.util.spi.CalendarDataProvider; import java.util.spi.CalendarNameProvider; @@ -676,4 +690,162 @@ class DummyImplementations { } } + + static class DummyFileChannel extends FileChannel { + @Override + protected void implCloseChannel() throws IOException { + + } + + @Override + public int read(ByteBuffer dst) throws IOException { + return 0; + } + + @Override + public long read(ByteBuffer[] dsts, int offset, int length) throws IOException { + return 0; + } + + @Override + public int write(ByteBuffer src) throws IOException { + return 0; + } + + @Override + public long write(ByteBuffer[] srcs, int offset, int length) throws IOException { + return 0; + } + + @Override + public long position() throws IOException { + return 0; + } + + @Override + public FileChannel position(long newPosition) throws IOException { + return null; + } + + @Override + public long size() throws IOException { + return 0; + } + + @Override + public FileChannel truncate(long size) throws IOException { + return null; + } + + @Override + public void force(boolean metaData) throws IOException { + + } + + @Override + public long 
transferTo(long position, long count, WritableByteChannel target) throws IOException { + return 0; + } + + @Override + public long transferFrom(ReadableByteChannel src, long position, long count) throws IOException { + return 0; + } + + @Override + public int read(ByteBuffer dst, long position) throws IOException { + return 0; + } + + @Override + public int write(ByteBuffer src, long position) throws IOException { + return 0; + } + + @Override + public MappedByteBuffer map(MapMode mode, long position, long size) throws IOException { + return null; + } + + @Override + public FileLock lock(long position, long size, boolean shared) throws IOException { + return null; + } + + @Override + public FileLock tryLock(long position, long size, boolean shared) throws IOException { + return null; + } + } + + static class DummyAsynchronousFileChannel extends AsynchronousFileChannel { + @Override + public boolean isOpen() { + return false; + } + + @Override + public void close() throws IOException { + + } + + @Override + public long size() throws IOException { + return 0; + } + + @Override + public AsynchronousFileChannel truncate(long size) throws IOException { + return null; + } + + @Override + public void force(boolean metaData) throws IOException { + + } + + @Override + public void lock(long position, long size, boolean shared, A attachment, CompletionHandler handler) { + + } + + @Override + public Future lock(long position, long size, boolean shared) { + return null; + } + + @Override + public FileLock tryLock(long position, long size, boolean shared) throws IOException { + return null; + } + + @Override + public void read(ByteBuffer dst, long position, A attachment, CompletionHandler handler) { + + } + + @Override + public Future read(ByteBuffer dst, long position) { + return null; + } + + @Override + public void write(ByteBuffer src, long position, A attachment, CompletionHandler handler) { + + } + + @Override + public Future write(ByteBuffer src, long position) { + return 
null; + } + } + + @SuppressForbidden(reason = "specifically testing readWriteSelectableChannel") + static class DummySelectableChannelCloser implements Channels.SelectableChannelCloser { + @Override + public void implCloseChannel(SelectableChannel sc) throws IOException {} + + @Override + public void implReleaseChannel(SelectableChannel sc) throws IOException {} + } } diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java index bedb8790c1ad..aa0fdc33a36e 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.qa.test; +import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.qa.entitled.EntitledActions; @@ -22,16 +23,24 @@ import java.io.FileWriter; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.nio.file.attribute.UserPrincipal; +import java.security.GeneralSecurityException; +import java.security.KeyStore; import java.util.Scanner; +import java.util.jar.JarFile; +import java.util.zip.ZipException; +import java.util.zip.ZipFile; +import static java.nio.charset.Charset.defaultCharset; +import static java.util.zip.ZipFile.OPEN_DELETE; +import static java.util.zip.ZipFile.OPEN_READ; +import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.createTempFileForWrite; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; import static 
org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") +@SuppressWarnings("unused") // Called via reflection class FileCheckActions { static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); @@ -207,21 +216,6 @@ class FileCheckActions { readWriteFile().toFile().setWritable(true, false); } - @EntitlementTest(expectedAccess = PLUGINS) - static void createScannerFile() throws FileNotFoundException { - new Scanner(readFile().toFile()); - } - - @EntitlementTest(expectedAccess = PLUGINS) - static void createScannerFileWithCharset() throws IOException { - new Scanner(readFile().toFile(), StandardCharsets.UTF_8); - } - - @EntitlementTest(expectedAccess = PLUGINS) - static void createScannerFileWithCharsetName() throws FileNotFoundException { - new Scanner(readFile().toFile(), "UTF-8"); - } - @EntitlementTest(expectedAccess = PLUGINS) static void createFileInputStreamFile() throws IOException { new FileInputStream(readFile().toFile()).close(); @@ -348,19 +342,138 @@ class FileCheckActions { } @EntitlementTest(expectedAccess = PLUGINS) - static void filesGetOwner() throws IOException { - Files.getOwner(readFile()); + static void keystoreGetInstance_FileCharArray() throws IOException { + try { + KeyStore.getInstance(readFile().toFile(), new char[0]); + } catch (GeneralSecurityException expected) { + return; + } + throw new AssertionError("Expected an exception"); } @EntitlementTest(expectedAccess = PLUGINS) - static void filesProbeContentType() throws IOException { - Files.probeContentType(readFile()); + static void keystoreGetInstance_FileLoadStoreParameter() throws IOException { + try { + KeyStore.LoadStoreParameter loadStoreParameter = () -> null; + KeyStore.getInstance(readFile().toFile(), loadStoreParameter); + } catch (GeneralSecurityException expected) { + return; + } + throw new AssertionError("Expected an exception"); } 
@EntitlementTest(expectedAccess = PLUGINS) - static void filesSetOwner() throws IOException { - UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); - Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method + static void keystoreBuilderNewInstance() { + try { + KeyStore.Builder.newInstance("", null, readFile().toFile(), null); + } catch (NullPointerException expected) { + return; + } + throw new AssertionError("Expected an exception"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_String() throws IOException { + expectZipException(() -> new ZipFile(readFile().toString()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_StringCharset() throws IOException { + expectZipException(() -> new ZipFile(readFile().toString(), defaultCharset()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_File() throws IOException { + expectZipException(() -> new ZipFile(readFile().toFile()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_FileCharset() throws IOException { + expectZipException(() -> new ZipFile(readFile().toFile(), defaultCharset()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_FileReadOnly() throws IOException { + expectZipException(() -> new ZipFile(readFile().toFile(), OPEN_READ).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_FileReadAndDelete() throws IOException { + expectZipException(() -> new ZipFile(createTempFileForWrite().toFile(), OPEN_READ | OPEN_DELETE).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_ReadOnlyCharset() throws IOException { + expectZipException(() -> new ZipFile(readFile().toFile(), OPEN_READ, defaultCharset()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void zipFile_ReadAndDeleteCharset() throws IOException { + 
expectZipException(() -> new ZipFile(createTempFileForWrite().toFile(), OPEN_READ | OPEN_DELETE, defaultCharset()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_String() throws IOException { + expectZipException(() -> new JarFile(readFile().toString()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_StringBoolean() throws IOException { + expectZipException(() -> new JarFile(readFile().toString(), false).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_FileReadOnly() throws IOException { + expectZipException(() -> new JarFile(readFile().toFile(), false, OPEN_READ).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_FileReadAndDelete() throws IOException { + expectZipException(() -> new JarFile(createTempFileForWrite().toFile(), false, OPEN_READ | OPEN_DELETE).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_FileBooleanReadOnlyVersion() throws IOException { + expectZipException(() -> new JarFile(readFile().toFile(), false, OPEN_READ, Runtime.version()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_FileBooleanReadAndDeleteOnlyVersion() throws IOException { + expectZipException(() -> new JarFile(createTempFileForWrite().toFile(), false, OPEN_READ | OPEN_DELETE, Runtime.version()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFile_File() throws IOException { + expectZipException(() -> new JarFile(readFile().toFile()).close()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void jarFileFileBoolean() throws IOException { + expectZipException(() -> new JarFile(readFile().toFile(), false).close()); + } + + private static void expectZipException(CheckedRunnable action) throws IOException { + try { + action.run(); + } catch (ZipException expected) { + return; + } + throw new AssertionError("Expected an exception"); + } + + 
@EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFile() throws FileNotFoundException { + new Scanner(readFile().toFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFileWithCharset() throws IOException { + new Scanner(readFile().toFile(), StandardCharsets.UTF_8); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createScannerFileWithCharsetName() throws FileNotFoundException { + new Scanner(readFile().toFile(), "UTF-8"); } private FileCheckActions() {} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioChannelsActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioChannelsActions.java new file mode 100644 index 000000000000..777f0fbf67a9 --- /dev/null +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/NioChannelsActions.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
package org.elasticsearch.entitlement.qa.test;

import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.qa.entitled.EntitledActions;

import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import java.util.Set;

import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;

/**
 * Entitlement checks for {@code java.nio.channels} file-channel entry points: constructing
 * channel subclasses is always denied, while opening channels on files the plugin is entitled
 * to read/write is allowed.
 */
class NioChannelsActions {

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void createFileChannel() throws IOException {
        // Constructing any FileChannel subclass (even a no-op dummy) must be denied.
        new DummyImplementations.DummyFileChannel().close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForWrite() throws IOException {
        FileChannel.open(FileCheckActions.readWriteFile(), StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForRead() throws IOException {
        FileChannel.open(FileCheckActions.readFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForWriteWithOptions() throws IOException {
        // Same as fileChannelOpenForWrite, but through the Set-based overload.
        FileChannel.open(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE)).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForReadWithOptions() throws IOException {
        FileChannel.open(FileCheckActions.readFile(), Set.of(StandardOpenOption.READ)).close();
    }

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void createAsynchronousFileChannel() throws IOException {
        new DummyImplementations.DummyAsynchronousFileChannel().close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForWrite() throws IOException {
        var tempFile = EntitledActions.createTempFileForWrite();
        AsynchronousFileChannel.open(tempFile, StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForRead() throws IOException {
        var tempFile = EntitledActions.createTempFileForRead();
        AsynchronousFileChannel.open(tempFile).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForWriteWithOptions() throws IOException {
        var tempFile = EntitledActions.createTempFileForWrite();
        // DIRECT_EXECUTOR_SERVICE keeps the open synchronous; no callbacks are issued.
        AsynchronousFileChannel.open(tempFile, Set.of(StandardOpenOption.WRITE), EsExecutors.DIRECT_EXECUTOR_SERVICE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForReadWithOptions() throws IOException {
        var tempFile = EntitledActions.createTempFileForRead();
        AsynchronousFileChannel.open(tempFile, Set.of(StandardOpenOption.READ), EsExecutors.DIRECT_EXECUTOR_SERVICE).close();
    }

    @SuppressForbidden(reason = "specifically testing jdk.nio.Channels")
    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void channelsReadWriteSelectableChannel() throws IOException {
        jdk.nio.Channels.readWriteSelectableChannel(new FileDescriptor(), new DummyImplementations.DummySelectableChannelCloser()).close();
    }
}
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.qa.test; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.qa.entitled.EntitledActions; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.nio.file.FileSystemException; +import java.nio.file.FileVisitOption; +import java.nio.file.FileVisitResult; +import java.nio.file.FileVisitor; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.nio.file.attribute.BasicFileAttributes; +import java.nio.file.attribute.FileOwnerAttributeView; +import java.nio.file.attribute.FileTime; +import java.nio.file.attribute.UserPrincipal; +import java.time.Instant; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; +import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readDir; +import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readFile; +import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readWriteDir; +import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readWriteFile; + +class NioFilesActions { + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesGetOwner() throws IOException { + Files.getOwner(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesProbeContentType() 
throws IOException { + Files.probeContentType(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void filesSetOwner() throws IOException { + UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile()); + Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewInputStream() throws IOException { + Files.newInputStream(readFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewOutputStream() throws IOException { + Files.newOutputStream(readWriteFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewByteChannelRead() throws IOException { + Files.newByteChannel(readFile(), Set.of(StandardOpenOption.READ)).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewByteChannelWrite() throws IOException { + Files.newByteChannel(readWriteFile(), Set.of(StandardOpenOption.WRITE)).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewByteChannelReadVarargs() throws IOException { + Files.newByteChannel(readFile(), StandardOpenOption.READ).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewByteChannelWriteVarargs() throws IOException { + Files.newByteChannel(readWriteFile(), StandardOpenOption.WRITE).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewDirectoryStream() throws IOException { + Files.newDirectoryStream(FileCheckActions.readDir()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewDirectoryStreamGlob() throws IOException { + Files.newDirectoryStream(FileCheckActions.readDir(), "*").close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewDirectoryStreamFilter() throws IOException { + Files.newDirectoryStream(FileCheckActions.readDir(), entry 
-> false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateFile() throws IOException { + Files.createFile(readWriteDir().resolve("file.txt")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateDirectory() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.createDirectory(directory.resolve("subdir")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateDirectories() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.createDirectories(directory.resolve("subdir").resolve("subsubdir")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateTempFileInDir() throws IOException { + Files.createTempFile(readWriteDir(), "prefix", "suffix"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateTempDirectoryInDir() throws IOException { + Files.createTempDirectory(readWriteDir(), "prefix"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateSymbolicLink() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + Files.createSymbolicLink(directory.resolve("link"), readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCreateLink() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + try { + Files.createLink(directory.resolve("link"), readFile()); + } catch (UnsupportedOperationException | FileSystemException e) { + // OK not to implement symbolic link in the filesystem + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesDelete() throws IOException { + var file = EntitledActions.createTempFileForWrite(); + Files.delete(file); + } + + 
@EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesDeleteIfExists() throws IOException { + var file = EntitledActions.createTempFileForWrite(); + Files.deleteIfExists(file); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadSymbolicLink() throws IOException { + var link = EntitledActions.createTempSymbolicLink(); + Files.readSymbolicLink(link); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCopy() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.copy(readFile(), directory.resolve("copied")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesMove() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + var file = EntitledActions.createTempFileForWrite(); + Files.move(file, directory.resolve("moved")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsSameFile() throws IOException { + Files.isSameFile(readWriteFile(), readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesMismatch() throws IOException { + Files.mismatch(readWriteFile(), readFile()); + } + + @SuppressForbidden(reason = "testing entitlements on this API specifically") + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsHidden() throws IOException { + Files.isHidden(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesGetFileStore() throws IOException { + var file = EntitledActions.createTempFileForRead(); + Files.getFileStore(file); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void checkFilesGetFileAttributeView() { + Files.getFileAttributeView(readFile(), FileOwnerAttributeView.class); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadAttributesWithClass() throws IOException { + Files.readAttributes(readFile(), BasicFileAttributes.class); + } + + 
@EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadAttributesWithString() throws IOException { + Files.readAttributes(readFile(), "*"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesGetAttribute() throws IOException { + try { + Files.getAttribute(readFile(), "dos:hidden"); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesSetAttribute() throws IOException { + var file = EntitledActions.createTempFileForWrite(); + try { + Files.setAttribute(file, "dos:hidden", true); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesGetPosixFilePermissions() throws IOException { + try { + Files.getPosixFilePermissions(readFile()); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesSetPosixFilePermissions() throws IOException { + var file = EntitledActions.createTempFileForWrite(); + try { + Files.setPosixFilePermissions(file, Set.of()); + } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) { + // OK if the file does not have/does not support the attribute + } + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsSymbolicLink() { + Files.isSymbolicLink(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsDirectory() { + Files.isDirectory(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsRegularFile() { + Files.isRegularFile(readFile()); + } + + 
@EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesGetLastModifiedTime() throws IOException { + Files.getLastModifiedTime(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesSetLastModifiedTime() throws IOException { + var file = EntitledActions.createTempFileForWrite(); + Files.setLastModifiedTime(file, FileTime.from(Instant.now())); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesSize() throws IOException { + Files.size(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesExists() { + Files.exists(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNotExists() { + Files.notExists(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsReadable() { + Files.isReadable(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsWriteable() { + Files.isWritable(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesIsExecutable() { + Files.isExecutable(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWalkFileTree() throws IOException { + Files.walkFileTree(readDir(), new FileVisitor<>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + }); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWalkFileTreeWithOptions() 
throws IOException { + Files.walkFileTree(readDir(), Set.of(FileVisitOption.FOLLOW_LINKS), 2, new FileVisitor<>() { + @Override + public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + return FileVisitResult.SKIP_SUBTREE; + } + }); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewBufferedReader() throws IOException { + Files.newBufferedReader(readFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewBufferedReaderWithCharset() throws IOException { + Files.newBufferedReader(readFile(), Charset.defaultCharset()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewBufferedWriter() throws IOException { + Files.newBufferedWriter(readWriteFile(), StandardOpenOption.WRITE).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesNewBufferedWriterWithCharset() throws IOException { + Files.newBufferedWriter(readWriteFile(), Charset.defaultCharset(), StandardOpenOption.WRITE).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCopyInputStream() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.copy(new ByteArrayInputStream("foo".getBytes(StandardCharsets.UTF_8)), directory.resolve("copied")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesCopyOutputStream() throws IOException { + Files.copy(readFile(), new ByteArrayOutputStream()); + } + + 
@EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadAllBytes() throws IOException { + Files.readAllBytes(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadString() throws IOException { + Files.readString(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadStringWithCharset() throws IOException { + Files.readString(readFile(), Charset.defaultCharset()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadAllLines() throws IOException { + Files.readAllLines(readFile()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesReadAllLinesWithCharset() throws IOException { + Files.readAllLines(readFile(), Charset.defaultCharset()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWrite() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.write(directory.resolve("file"), "foo".getBytes(StandardCharsets.UTF_8)); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWriteLines() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.write(directory.resolve("file"), List.of("foo")); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWriteString() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.writeString(directory.resolve("file"), "foo"); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWriteStringWithCharset() throws IOException { + var directory = EntitledActions.createTempDirectoryForWrite(); + Files.writeString(directory.resolve("file"), "foo", Charset.defaultCharset()); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesList() throws IOException { + Files.list(readDir()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWalk() throws IOException 
{ + Files.walk(readDir()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesWalkWithDepth() throws IOException { + Files.walk(readDir(), 2).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesFind() throws IOException { + Files.find(readDir(), 2, (path, basicFileAttributes) -> false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesLines() throws IOException { + Files.lines(readFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void checkFilesLinesWithCharset() throws IOException { + Files.lines(readFile(), Charset.defaultCharset()).close(); + } + + private NioFilesActions() {} +} diff --git a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java index 8c0b8d18612f..ef6688a9a820 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/RestEntitlementsCheckAction.java @@ -189,6 +189,8 @@ public class RestEntitlementsCheckAction extends BaseRestHandler { getTestEntries(FileStoreActions.class), getTestEntries(ManageThreadsActions.class), getTestEntries(NativeActions.class), + getTestEntries(NioChannelsActions.class), + getTestEntries(NioFilesActions.class), getTestEntries(NioFileSystemActions.class), getTestEntries(PathActions.class), getTestEntries(SpiActions.class), diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java index 9dc1028148a3..4c1536f17ef9 100644 --- 
a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsTestRule.java @@ -38,7 +38,7 @@ class EntitlementsTestRule implements TestRule { Map.of( "files", List.of( - Map.of("path", tempDir.resolve("read_dir"), "mode", "read"), + Map.of("path", tempDir.resolve("read_dir"), "mode", "read_write"), Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"), Map.of("path", tempDir.resolve("read_file"), "mode", "read"), Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write") diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index 5c8441bcecb9..697d26747b80 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -14,6 +14,7 @@ module org.elasticsearch.entitlement { requires org.elasticsearch.base; requires jdk.attach; requires java.net.http; + requires jdk.net; requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 17885fb10307..5c32cdbf88f5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -39,6 +39,7 @@ public class EntitlementBootstrap { Function, String> pluginResolver, Function settingResolver, Function> settingGlobResolver, + Function repoDirResolver, Path[] dataDirs, Path configDir, Path logsDir, @@ -49,6 +50,7 @@ public class EntitlementBootstrap { requireNonNull(pluginResolver); requireNonNull(settingResolver); requireNonNull(settingGlobResolver); + 
requireNonNull(repoDirResolver); requireNonNull(dataDirs); if (dataDirs.length == 0) { throw new IllegalArgumentException("must provide at least one data directory"); @@ -71,6 +73,9 @@ public class EntitlementBootstrap { * * @param pluginPolicies a map holding policies for plugins (and modules), by plugin (or module) name. * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). + * @param settingResolver a functor to resolve the value of an Elasticsearch setting. + * @param settingGlobResolver a functor to resolve a glob expression for one or more Elasticsearch settings. + * @param repoDirResolver a functor to map a repository location to its Elasticsearch path. * @param dataDirs data directories for Elasticsearch * @param configDir the config directory for Elasticsearch * @param tempDir the temp directory for Elasticsearch @@ -81,6 +86,7 @@ public class EntitlementBootstrap { Function, String> pluginResolver, Function settingResolver, Function> settingGlobResolver, + Function repoDirResolver, Path[] dataDirs, Path configDir, Path logsDir, @@ -95,6 +101,7 @@ public class EntitlementBootstrap { pluginResolver, settingResolver, settingGlobResolver, + repoDirResolver, dataDirs, configDir, logsDir, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 0e501b7107a7..2a2aa689da9f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -52,7 +52,6 @@ import java.nio.file.WatchService; import java.nio.file.attribute.FileAttribute; import java.nio.file.spi.FileSystemProvider; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap; import java.util.List; @@ -147,7 +146,18 @@ public class EntitlementInitialization { List serverScopes = new ArrayList<>(); Collections.addAll( serverScopes, - new Scope("org.elasticsearch.base", List.of(new CreateClassLoaderEntitlement())), + new Scope( + "org.elasticsearch.base", + List.of( + new CreateClassLoaderEntitlement(), + new FilesEntitlement( + List.of( + FileData.ofPath(bootstrapArgs.repoDirResolver().apply(""), READ_WRITE), + FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE) + ) + ) + ) + ), new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())), new Scope( "org.elasticsearch.server", @@ -160,31 +170,32 @@ public class EntitlementInitialization { new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement(), new FilesEntitlement( - Stream.concat( - Stream.of( - FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), - FileData.ofPath(bootstrapArgs.configDir(), READ), - FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), - // OS release on Linux - FileData.ofPath(Path.of("/etc/os-release"), READ), - FileData.ofPath(Path.of("/etc/system-release"), READ), - FileData.ofPath(Path.of("/usr/lib/os-release"), READ), - // read max virtual memory areas - FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ), - FileData.ofPath(Path.of("/proc/meminfo"), READ), - // load averages on Linux - FileData.ofPath(Path.of("/proc/loadavg"), READ), - // control group stats on Linux. cgroup v2 stats are in an unpredicable - // location under `/sys/fs/cgroup`, so unfortunately we have to allow - // read access to the entire directory hierarchy. 
- FileData.ofPath(Path.of("/proc/self/cgroup"), READ), - FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ), - // // io stats on Linux - FileData.ofPath(Path.of("/proc/self/mountinfo"), READ), - FileData.ofPath(Path.of("/proc/diskstats"), READ) - ), - Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ)) - ).toList() + List.of( + // Base ES directories + FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), + FileData.ofPath(bootstrapArgs.configDir(), READ), + FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), + FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE), + FileData.ofPath(bootstrapArgs.repoDirResolver().apply(""), READ_WRITE), + + // OS release on Linux + FileData.ofPath(Path.of("/etc/os-release"), READ), + FileData.ofPath(Path.of("/etc/system-release"), READ), + FileData.ofPath(Path.of("/usr/lib/os-release"), READ), + // read max virtual memory areas + FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ), + FileData.ofPath(Path.of("/proc/meminfo"), READ), + // load averages on Linux + FileData.ofPath(Path.of("/proc/loadavg"), READ), + // control group stats on Linux. cgroup v2 stats are in an unpredictable + // location under `/sys/fs/cgroup`, so unfortunately we have to allow + // read access to the entire directory hierarchy. 
+ FileData.ofPath(Path.of("/proc/self/cgroup"), READ), + FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ), + // io stats on Linux + FileData.ofPath(Path.of("/proc/self/mountinfo"), READ), + FileData.ofPath(Path.of("/proc/diskstats"), READ) + ) ) ) ), @@ -196,13 +207,18 @@ public class EntitlementInitialization { new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement(), new FilesEntitlement( - Stream.concat( - Stream.of(FileData.ofPath(bootstrapArgs.configDir(), READ)), - Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ_WRITE)) - ).toList() + List.of( + FileData.ofPath(bootstrapArgs.configDir(), READ), + FileData.ofPath(bootstrapArgs.tempDir(), READ), + FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE) + ) ) ) ), + new Scope( + "org.apache.lucene.misc", + List.of(new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE)))) + ), new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), new Scope( "org.elasticsearch.nativeaccess", @@ -215,8 +231,10 @@ Path trustStorePath = trustStorePath(); if (trustStorePath != null) { - serverScopes.add( - new Scope("org.bouncycastle.fips.tls", List.of(new FilesEntitlement(List.of(FileData.ofPath(trustStorePath, READ))))) + Collections.addAll( + serverScopes, + new Scope("org.bouncycastle.fips.tls", List.of(new FilesEntitlement(List.of(FileData.ofPath(trustStorePath, READ))))), + new Scope("org.bouncycastle.fips.core", List.of(new ManageThreadsEntitlement())) ); } @@ -224,7 +242,16 @@ var serverPolicy = new Policy("server", serverScopes); // agents run without a module, so this is a special hack for the apm agent // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed - List agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new 
ManageThreadsEntitlement()); + List agentEntitlements = List.of( + new CreateClassLoaderEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + List.of( + FileData.ofPath(Path.of("/co/elastic/apm/agent/"), READ), + FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ) + ) + ) + ); var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver(); return new PolicyManager( serverPolicy, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 1e2a88f82f7d..f6a45cce3c56 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -9,6 +9,8 @@ package org.elasticsearch.entitlement.runtime.api; +import jdk.nio.Channels; + import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; @@ -19,6 +21,7 @@ import java.io.FileFilter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.foreign.AddressLayout; @@ -64,6 +67,8 @@ import java.nio.file.AccessMode; import java.nio.file.CopyOption; import java.nio.file.DirectoryStream; import java.nio.file.FileStore; +import java.nio.file.FileVisitOption; +import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.LinkOption; import java.nio.file.OpenOption; @@ -71,10 +76,17 @@ import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.nio.file.WatchEvent; import java.nio.file.WatchService; +import java.nio.file.attribute.BasicFileAttributes; import 
java.nio.file.attribute.FileAttribute; +import java.nio.file.attribute.FileAttributeView; +import java.nio.file.attribute.FileTime; +import java.nio.file.attribute.PosixFilePermission; import java.nio.file.attribute.UserPrincipal; import java.nio.file.spi.FileSystemProvider; +import java.security.KeyStore; +import java.security.Provider; import java.security.cert.CertStoreParameters; +import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Map; @@ -83,6 +95,7 @@ import java.util.Set; import java.util.TimeZone; import java.util.concurrent.ExecutorService; import java.util.concurrent.ForkJoinPool; +import java.util.function.BiPredicate; import java.util.function.Consumer; import javax.net.ssl.HostnameVerifier; @@ -1232,6 +1245,36 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { } } + @Override + public void check$java_security_KeyStore$$getInstance(Class callerClass, File file, char[] password) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_security_KeyStore$$getInstance(Class callerClass, File file, KeyStore.LoadStoreParameter param) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_security_KeyStore$Builder$$newInstance( + Class callerClass, + File file, + KeyStore.ProtectionParameter protection + ) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_security_KeyStore$Builder$$newInstance( + Class callerClass, + String type, + Provider provider, + File file, + KeyStore.ProtectionParameter protection + ) { + policyManager.checkFileRead(callerClass, file); + } + @Override public void check$java_util_Scanner$(Class callerClass, File source) { policyManager.checkFileRead(callerClass, source); @@ -1247,8 +1290,134 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { policyManager.checkFileRead(callerClass, source); } + @Override + public void 
check$java_util_jar_JarFile$(Class callerClass, String name) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_util_jar_JarFile$(Class callerClass, String name, boolean verify) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_util_jar_JarFile$(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify, int mode) { + policyManager.checkFileWithZipMode(callerClass, file, mode); + } + + @Override + public void check$java_util_jar_JarFile$(Class callerClass, File file, boolean verify, int mode, Runtime.Version version) { + policyManager.checkFileWithZipMode(callerClass, file, mode); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, String name) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, String name, Charset charset) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, File file, int mode) { + policyManager.checkFileWithZipMode(callerClass, file, mode); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, File file, Charset charset) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_util_zip_ZipFile$(Class callerClass, File file, int mode, Charset charset) { + policyManager.checkFileWithZipMode(callerClass, file, mode); + } + // nio + @Override + public 
void check$java_nio_channels_FileChannel$(Class callerClass) { + policyManager.checkChangeFilesHandling(callerClass); + } + + @Override + public void check$java_nio_channels_FileChannel$$open( + Class callerClass, + Path path, + Set options, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$java_nio_channels_FileChannel$$open(Class callerClass, Path path, OpenOption... options) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$java_nio_channels_AsynchronousFileChannel$(Class callerClass) { + policyManager.checkChangeFilesHandling(callerClass); + } + + @Override + public void check$java_nio_channels_AsynchronousFileChannel$$open( + Class callerClass, + Path path, + Set options, + ExecutorService executor, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$java_nio_channels_AsynchronousFileChannel$$open(Class callerClass, Path path, OpenOption... options) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$jdk_nio_Channels$$readWriteSelectableChannel( + Class callerClass, + FileDescriptor fd, + Channels.SelectableChannelCloser closer + ) { + policyManager.checkFileDescriptorWrite(callerClass); + } + @Override public void check$java_nio_file_Files$$getOwner(Class callerClass, Path path, LinkOption... 
options) { policyManager.checkFileRead(callerClass, path); @@ -1264,6 +1433,411 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { policyManager.checkFileWrite(callerClass, path); } + @Override + public void check$java_nio_file_Files$$newInputStream(Class callerClass, Path path, OpenOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$newOutputStream(Class callerClass, Path path, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$newByteChannel( + Class callerClass, + Path path, + Set options, + FileAttribute... attrs + ) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$java_nio_file_Files$$newByteChannel(Class callerClass, Path path, OpenOption... options) { + if (isOpenForWrite(options)) { + policyManager.checkFileWrite(callerClass, path); + } else { + policyManager.checkFileRead(callerClass, path); + } + } + + @Override + public void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir, String glob) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$newDirectoryStream(Class callerClass, Path dir, DirectoryStream.Filter filter) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$createFile(Class callerClass, Path path, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$createDirectory(Class callerClass, Path dir, FileAttribute... 
attrs) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$createDirectories(Class callerClass, Path dir, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$createTempFile( + Class callerClass, + Path dir, + String prefix, + String suffix, + FileAttribute... attrs + ) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$createTempFile(Class callerClass, String prefix, String suffix, FileAttribute... attrs) { + policyManager.checkCreateTempFile(callerClass); + } + + @Override + public void check$java_nio_file_Files$$createTempDirectory(Class callerClass, Path dir, String prefix, FileAttribute... attrs) { + policyManager.checkFileWrite(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$createTempDirectory(Class callerClass, String prefix, FileAttribute... attrs) { + policyManager.checkCreateTempFile(callerClass); + } + + @Override + public void check$java_nio_file_Files$$createSymbolicLink(Class callerClass, Path link, Path target, FileAttribute... attrs) { + policyManager.checkFileRead(callerClass, target); + policyManager.checkFileWrite(callerClass, link); + } + + @Override + public void check$java_nio_file_Files$$createLink(Class callerClass, Path link, Path existing) { + policyManager.checkFileRead(callerClass, existing); + policyManager.checkFileWrite(callerClass, link); + } + + @Override + public void check$java_nio_file_Files$$delete(Class callerClass, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$deleteIfExists(Class callerClass, Path path) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$copy(Class callerClass, Path source, Path target, CopyOption... 
options) { + policyManager.checkFileRead(callerClass, source); + policyManager.checkFileWrite(callerClass, target); + } + + @Override + public void check$java_nio_file_Files$$move(Class callerClass, Path source, Path target, CopyOption... options) { + policyManager.checkFileWrite(callerClass, source); + policyManager.checkFileWrite(callerClass, target); + } + + @Override + public void check$java_nio_file_Files$$readSymbolicLink(Class callerClass, Path link) { + policyManager.checkFileRead(callerClass, link); + } + + @Override + public void check$java_nio_file_Files$$getFileStore(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isSameFile(Class callerClass, Path path, Path path2) { + policyManager.checkFileRead(callerClass, path); + policyManager.checkFileRead(callerClass, path2); + } + + @Override + public void check$java_nio_file_Files$$mismatch(Class callerClass, Path path, Path path2) { + policyManager.checkFileRead(callerClass, path); + policyManager.checkFileRead(callerClass, path2); + } + + @Override + public void check$java_nio_file_Files$$isHidden(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$getFileAttributeView( + Class callerClass, + Path path, + Class type, + LinkOption... options + ) { + policyManager.checkGetFileAttributeView(callerClass); + } + + @Override + public void check$java_nio_file_Files$$readAttributes( + Class callerClass, + Path path, + Class type, + LinkOption... options + ) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$setAttribute( + Class callerClass, + Path path, + String attribute, + Object value, + LinkOption... 
options + ) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$getAttribute(Class callerClass, Path path, String attribute, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$readAttributes(Class callerClass, Path path, String attributes, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$getPosixFilePermissions(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$setPosixFilePermissions(Class callerClass, Path path, Set perms) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isSymbolicLink(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isDirectory(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isRegularFile(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$getLastModifiedTime(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$setLastModifiedTime(Class callerClass, Path path, FileTime time) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$size(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$exists(Class callerClass, Path path, LinkOption... 
options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$notExists(Class callerClass, Path path, LinkOption... options) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isReadable(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isWritable(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$isExecutable(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$walkFileTree( + Class callerClass, + Path start, + Set options, + int maxDepth, + FileVisitor visitor + ) { + policyManager.checkFileRead(callerClass, start); + } + + @Override + public void check$java_nio_file_Files$$walkFileTree(Class callerClass, Path start, FileVisitor visitor) { + policyManager.checkFileRead(callerClass, start); + } + + @Override + public void check$java_nio_file_Files$$newBufferedReader(Class callerClass, Path path, Charset cs) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$newBufferedReader(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$newBufferedWriter(Class callerClass, Path path, Charset cs, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$newBufferedWriter(Class callerClass, Path path, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$copy(Class callerClass, InputStream in, Path target, CopyOption... 
options) { + policyManager.checkFileWrite(callerClass, target); + } + + @Override + public void check$java_nio_file_Files$$copy(Class callerClass, Path source, OutputStream out) { + policyManager.checkFileRead(callerClass, source); + } + + @Override + public void check$java_nio_file_Files$$readAllBytes(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$readString(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$readString(Class callerClass, Path path, Charset cs) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$readAllLines(Class callerClass, Path path, Charset cs) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$readAllLines(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$write(Class callerClass, Path path, byte[] bytes, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$write( + Class callerClass, + Path path, + Iterable lines, + Charset cs, + OpenOption... options + ) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$write( + Class callerClass, + Path path, + Iterable lines, + OpenOption... options + ) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$writeString(Class callerClass, Path path, CharSequence csq, OpenOption... options) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$writeString( + Class callerClass, + Path path, + CharSequence csq, + Charset cs, + OpenOption... 
options + ) { + policyManager.checkFileWrite(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$list(Class callerClass, Path dir) { + policyManager.checkFileRead(callerClass, dir); + } + + @Override + public void check$java_nio_file_Files$$walk(Class callerClass, Path start, int maxDepth, FileVisitOption... options) { + policyManager.checkFileRead(callerClass, start); + } + + @Override + public void check$java_nio_file_Files$$walk(Class callerClass, Path start, FileVisitOption... options) { + policyManager.checkFileRead(callerClass, start); + } + + @Override + public void check$java_nio_file_Files$$find( + Class callerClass, + Path start, + int maxDepth, + BiPredicate matcher, + FileVisitOption... options + ) { + policyManager.checkFileRead(callerClass, start); + } + + @Override + public void check$java_nio_file_Files$$lines(Class callerClass, Path path, Charset cs) { + policyManager.checkFileRead(callerClass, path); + } + + @Override + public void check$java_nio_file_Files$$lines(Class callerClass, Path path) { + policyManager.checkFileRead(callerClass, path); + } + // file system providers @Override @@ -1299,6 +1873,17 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { || options.contains(StandardOpenOption.DELETE_ON_CLOSE); } + private static boolean isOpenForWrite(OpenOption... 
options) { + return Arrays.stream(options) + .anyMatch( + o -> o.equals(StandardOpenOption.WRITE) + || o.equals(StandardOpenOption.APPEND) + || o.equals(StandardOpenOption.CREATE) + || o.equals(StandardOpenOption.CREATE_NEW) + || o.equals(StandardOpenOption.DELETE_ON_CLOSE) + ); + } + @Override public void checkNewFileChannel( Class callerClass, diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index d607a5e25b41..66e44576b745 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -11,6 +11,8 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; +import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; @@ -48,6 +50,8 @@ import static java.util.Objects.requireNonNull; import static java.util.function.Predicate.not; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.toUnmodifiableMap; +import static java.util.zip.ZipFile.OPEN_DELETE; +import static java.util.zip.ZipFile.OPEN_READ; public class PolicyManager { private static final Logger logger = LogManager.getLogger(PolicyManager.class); @@ -215,7 +219,8 @@ public class PolicyManager { requestingClass.getModule().getName(), requestingClass, operationDescription.get() - ) + ), + callerClass ); } @@ -254,6 +259,13 @@ public class PolicyManager { 
checkChangeJVMGlobalState(callerClass); } + /** + * Check for operations that can modify the way file operations are handled + */ + public void checkChangeFilesHandling(Class callerClass) { + checkChangeJVMGlobalState(callerClass); + } + @SuppressForbidden(reason = "Explicitly checking File apis") public void checkFileRead(Class callerClass, File file) { checkFileRead(callerClass, file.toPath()); @@ -274,7 +286,8 @@ public class PolicyManager { requestingClass.getModule().getName(), requestingClass, path - ) + ), + callerClass ); } } @@ -299,11 +312,28 @@ public class PolicyManager { requestingClass.getModule().getName(), requestingClass, path - ) + ), + callerClass ); } } + public void checkCreateTempFile(Class callerClass) { + checkFileWrite(callerClass, pathLookup.tempDir()); + } + + @SuppressForbidden(reason = "Explicitly checking File apis") + public void checkFileWithZipMode(Class callerClass, File file, int zipMode) { + assert zipMode == OPEN_READ || zipMode == (OPEN_READ | OPEN_DELETE); + if ((zipMode & OPEN_DELETE) == OPEN_DELETE) { + // This needs both read and write, but we happen to know that checkFileWrite + // actually checks both. 
+ checkFileWrite(callerClass, file); + } else { + checkFileRead(callerClass, file); + } + } + public void checkFileDescriptorRead(Class callerClass) { neverEntitled(callerClass, () -> "read file descriptor"); } @@ -348,14 +378,15 @@ public class PolicyManager { } var classEntitlements = getEntitlements(requestingClass); - checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass); - checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass); + checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass, callerClass); + checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass, callerClass); } private static void checkFlagEntitlement( ModuleEntitlements classEntitlements, Class entitlementClass, - Class requestingClass + Class requestingClass, + Class callerClass ) { if (classEntitlements.hasEntitlement(entitlementClass) == false) { notEntitled( @@ -365,7 +396,8 @@ public class PolicyManager { requestingClass.getModule().getName(), requestingClass, PolicyParser.getEntitlementTypeName(entitlementClass) - ) + ), + callerClass ); } logger.debug( @@ -405,12 +437,18 @@ public class PolicyManager { requestingClass.getModule().getName(), requestingClass, property - ) + ), + callerClass ); } - private static void notEntitled(String message) { - throw new NotEntitledException(message); + private static void notEntitled(String message, Class callerClass) { + var exception = new NotEntitledException(message); + // don't log self tests in EntitlementBootstrap + if (EntitlementBootstrap.class.equals(callerClass) == false) { + logger.warn(message, exception); + } + throw exception; } public void checkManageThreadsEntitlement(Class callerClass) { @@ -422,7 +460,7 @@ public class PolicyManager { if (isTriviallyAllowed(requestingClass)) { return; } - checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass); + 
checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass, callerClass); } ModuleEntitlements getEntitlements(Class requestingClass) { @@ -522,6 +560,10 @@ public class PolicyManager { logger.debug("Entitlement trivially allowed from system module [{}]", requestingClass.getModule().getName()); return true; } + if (EntitlementChecker.class.isAssignableFrom(requestingClass)) { + logger.debug("Entitlement trivially allowed for EntitlementChecker class"); + return true; + } logger.trace("Entitlement not trivially allowed"); return false; } diff --git a/modules/analysis-common/src/main/plugin-metadata/entitlement-policy.yaml b/modules/analysis-common/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..e59b9dd27406 --- /dev/null +++ b/modules/analysis-common/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +org.elasticsearch.analysis.common: + - files: + - relative_path: analysis + relative_to: config + mode: read diff --git a/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml b/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml index cef2e0cd6aeb..0070b1978d51 100644 --- a/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,10 @@ +org.elasticsearch.ingest.geoip: + - files: + - relative_path: "ingest-geoip" + relative_to: config + mode: read com.maxmind.db: - files: - - relative_path: "ingest-geoip/" - relative_to: "config" - mode: "read_write" + - relative_path: "ingest-geoip/" + relative_to: "config" + mode: "read_write" diff --git a/modules/ingest-user-agent/src/main/plugin-metadata/entitlement-policy.yaml b/modules/ingest-user-agent/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..bee7f4570cc6 --- /dev/null +++ b/modules/ingest-user-agent/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ 
+org.elasticsearch.ingest.useragent: + - files: + - relative_path: ingest-user-agent + relative_to: config + mode: read diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml index 394e5e38d9f5..d1e8d1aca74d 100644 --- a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,3 +1,7 @@ ALL-UNNAMED: - manage_threads - outbound_network + - files: + - relative_path: "" + relative_to: config + mode: read diff --git a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml index f22076b360b6..ec454ad3202d 100644 --- a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml @@ -6,3 +6,10 @@ io.netty.common: mode: "read" - path: "/usr/lib/os-release" mode: "read" + - path: "/proc/sys/net/core/somaxconn" + mode: read +com.azure.identity: + - files: + - relative_path: "storage-azure/" #/config/storage-azure/azure-federated-token + relative_to: config + mode: read diff --git a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml index 7a3f2c11d69b..430c925add8d 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml @@ -11,3 +11,5 @@ io.netty.common: mode: "read" - path: "/usr/lib/os-release" mode: "read" + - path: "/proc/sys/net/core/somaxconn" + mode: read diff --git a/muted-tests.yml b/muted-tests.yml index 8d451e2726b6..d671fbb2e6da 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -252,8 +252,6 @@ tests: - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT method: test 
{yaml=analysis-common/40_token_filters/stemmer_override file access} issue: https://github.com/elastic/elasticsearch/issues/121625 -- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT - issue: https://github.com/elastic/elasticsearch/issues/121967 - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT issue: https://github.com/elastic/elasticsearch/issues/121537 - class: org.elasticsearch.xpack.restart.FullClusterRestartIT @@ -290,10 +288,6 @@ tests: - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_488} issue: https://github.com/elastic/elasticsearch/issues/121611 -- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT - issue: https://github.com/elastic/elasticsearch/issues/122377 -- class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT - issue: https://github.com/elastic/elasticsearch/issues/122378 - class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests method: testSnapshotRecovery {p0=false p1=false} issue: https://github.com/elastic/elasticsearch/issues/122549 @@ -328,6 +322,12 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/122913 - class: org.elasticsearch.xpack.search.AsyncSearchSecurityIT issue: https://github.com/elastic/elasticsearch/issues/122940 +- class: org.elasticsearch.action.admin.indices.create.ShrinkIndexIT + method: testShrinkIndexPrimaryTerm + issue: https://github.com/elastic/elasticsearch/issues/122974 +- class: org.elasticsearch.test.apmintegration.TracesApmIT + method: testApmIntegration + issue: https://github.com/elastic/elasticsearch/issues/122129 # Examples: # diff --git a/plugins/analysis-icu/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/analysis-icu/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 
000000000000..7a261a774e4a --- /dev/null +++ b/plugins/analysis-icu/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +org.elasticsearch.analysis.icu: + - files: + - relative_path: "" + relative_to: config + mode: read diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 7df46699b79e..eac0109bc499 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -22,20 +22,35 @@ versions << [ 'hadoop': '3.4.1' ] +def patched = Attribute.of('patched', Boolean) + configurations { hdfsFixture2 hdfsFixture3 + compileClasspath { + attributes { + attribute(patched, true) + } + } + runtimeClasspath { + attributes { + attribute(patched, true) + } + } + testCompileClasspath { + attributes { + attribute(patched, true) + } + } + testRuntimeClasspath { + attributes { + attribute(patched, true) + } + } } dependencies { - api project(path: 'hadoop-client-api', configuration: 'default') - if (isEclipse) { - /* - * Eclipse can't pick up the shadow dependency so we point it at *something* - * so it can compile things. 
- */ - api project(path: 'hadoop-client-api') - } + api("org.apache.hadoop:hadoop-client-api:${versions.hadoop}") runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}" implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}" api "com.google.protobuf:protobuf-java:${versions.protobuf}" @@ -69,6 +84,20 @@ dependencies { hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2') hdfsFixture3 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadow') + + attributesSchema { + attribute(patched) + } + artifactTypes.getByName("jar") { + attributes.attribute(patched, false) + } + registerTransform(org.elasticsearch.gradle.internal.dependencies.patches.hdfs.HdfsClassPatcher) { + from.attribute(patched, false) + to.attribute(patched, true) + parameters { + matchingArtifacts = ["hadoop-client-api"] + } + } } restResources { @@ -190,6 +219,15 @@ tasks.named("thirdPartyAudit").configure { 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor', 'org.apache.hadoop.thirdparty.protobuf.MessageSchema', 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android32MemoryAccessor', - 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor' + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor', + 'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm', + 'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer', + 'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1', + 'org.apache.hadoop.io.nativeio.NativeIO', + 'org.apache.hadoop.service.launcher.InterruptEscalator', + 'org.apache.hadoop.service.launcher.IrqHandler', + 'org.apache.hadoop.util.SignalLogger$Handler' ) } diff --git a/plugins/repository-hdfs/hadoop-client-api/build.gradle 
b/plugins/repository-hdfs/hadoop-client-api/build.gradle deleted file mode 100644 index 46b0d949cdee..000000000000 --- a/plugins/repository-hdfs/hadoop-client-api/build.gradle +++ /dev/null @@ -1,54 +0,0 @@ -import org.gradle.api.file.ArchiveOperations - -apply plugin: 'elasticsearch.java' - -sourceSets { - patcher -} - -configurations { - thejar { - canBeResolved = true - } -} - -dependencies { - thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") { - transitive = false - } - - patcherImplementation 'org.ow2.asm:asm:9.7.1' - patcherImplementation 'org.ow2.asm:asm-tree:9.7.1' -} - -def outputDir = layout.buildDirectory.dir("patched-classes") - -def patchTask = tasks.register("patchClasses", JavaExec) { - inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE) - inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE) - outputs.dir(outputDir) - classpath = sourceSets.patcher.runtimeClasspath - mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher' - def thejar = configurations.thejar - doFirst { - args(thejar.singleFile, outputDir.get().asFile) - } -} - -interface InjectedArchiveOps { - @Inject ArchiveOperations getArchiveOperations() -} - -tasks.named('jar').configure { - dependsOn(configurations.thejar) - def injected = project.objects.newInstance(InjectedArchiveOps) - def thejar = configurations.thejar - from(patchTask) - from({ injected.getArchiveOperations().zipTree(thejar.singleFile) }) { - eachFile { - if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) { - it.exclude() - } - } - } -} diff --git a/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-LICENSE.txt b/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-LICENSE.txt deleted file mode 100644 index d64569567334..000000000000 --- a/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - 
Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-NOTICE.txt b/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-NOTICE.txt deleted file mode 100644 index 62fc5816c996..000000000000 --- a/plugins/repository-hdfs/hadoop-client-api/licenses/hadoop-client-api-NOTICE.txt +++ /dev/null @@ -1,2 +0,0 @@ -This product includes software developed by The Apache Software -Foundation (http://www.apache.org/). 
diff --git a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java b/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java deleted file mode 100644 index 732c55929454..000000000000 --- a/plugins/repository-hdfs/hadoop-client-api/src/patcher/java/org/elasticsearch/hdfs/patch/HdfsClassPatcher.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.hdfs.patch; - -import org.objectweb.asm.ClassReader; -import org.objectweb.asm.ClassVisitor; -import org.objectweb.asm.ClassWriter; - -import java.io.File; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Map; -import java.util.function.Function; -import java.util.jar.JarEntry; -import java.util.jar.JarFile; - -public class HdfsClassPatcher { - static final Map> patchers = Map.of( - "org/apache/hadoop/util/ShutdownHookManager.class", - ShutdownHookManagerPatcher::new, - "org/apache/hadoop/util/Shell.class", - ShellPatcher::new, - "org/apache/hadoop/security/UserGroupInformation.class", - SubjectGetSubjectPatcher::new, - "org/apache/hadoop/security/authentication/client/KerberosAuthenticator.class", - SubjectGetSubjectPatcher::new - ); - - public static void main(String[] args) throws Exception { - String jarPath = args[0]; - Path outputDir = Paths.get(args[1]); - - try (JarFile jarFile = new JarFile(new File(jarPath))) { - for (var patcher : patchers.entrySet()) { - JarEntry jarEntry 
= jarFile.getJarEntry(patcher.getKey()); - if (jarEntry == null) { - throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]"); - } - byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes(); - - ClassReader classReader = new ClassReader(classToPatch); - ClassWriter classWriter = new ClassWriter(classReader, 0); - classReader.accept(patcher.getValue().apply(classWriter), 0); - - Path outputFile = outputDir.resolve(patcher.getKey()); - Files.createDirectories(outputFile.getParent()); - Files.write(outputFile, classWriter.toByteArray()); - } - } - } -} diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml index 30e61739a063..0c921d8d6169 100644 --- a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml @@ -5,3 +5,7 @@ ALL-UNNAMED: - write_system_properties: properties: - hadoop.home.dir + - files: + - relative_path: "repository-hdfs/" + relative_to: config + mode: read_write diff --git a/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..1022253171a1 --- /dev/null +++ b/plugins/store-smb/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +ALL-UNNAMED: + - files: + - relative_path: "indices/" + relative_to: data + mode: read_write diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 8609eb6cf93a..19b3ac54769c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -65,10 +65,6 @@ ], "default":"open", 
"description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." - }, - "local":{ - "type":"boolean", - "description":"Return local information, do not retrieve the state from master node (default: false)" } } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml index b077cbe98b28..f19f315edb51 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yml @@ -50,17 +50,3 @@ setup: - match: {test_index.mappings.text.mapping.text.type: text} - match: {test_index.mappings.text.mapping.text.analyzer: default} - ---- -"Get field mapping with local parameter should fail": - - - requires: - test_runner_features: ["warnings"] - cluster_features: ["gte_v8.0.0"] - reason: "local parameter for get field mapping API was allowed before v8" - - - do: - catch: bad_request - indices.get_field_mapping: - fields: text - local: true diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 429d2584b0fa..3f5f38e64ac9 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -247,6 +247,7 @@ class Elasticsearch { pluginsResolver::resolveClassToPluginName, nodeEnv.settings()::get, nodeEnv.settings()::getGlobValues, + nodeEnv::resolveRepoDir, nodeEnv.dataDirs(), nodeEnv.configDir(), nodeEnv.logsDir(), diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java index c18591d07a85..2b5806724c75 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingNode.java @@ -112,6 +112,9 @@ public class RoutingNode implements Iterable { return this.nodeId; } + /** + * Number of shards assigned to this node. Includes relocating shards. Use {@link #numberOfOwningShards()} to exclude relocating shards. + */ public int size() { return shards.size(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 2042b2491432..38c2806778df 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -521,11 +521,6 @@ public class DesiredBalanceReconciler { } private DesiredBalanceMetrics.AllocationStats balance() { - // Check if rebalancing is disabled. - if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) { - return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS; - } - int unassignedShards = routingNodes.unassigned().size() + routingNodes.unassigned().ignored().size(); int totalAllocations = 0; int undesiredAllocationsExcludingShuttingDownNodes = 0; @@ -555,9 +550,15 @@ public class DesiredBalanceReconciler { } if (allocation.metadata().nodeShutdowns().contains(shardRouting.currentNodeId()) == false) { + // shard is not on a shutting down node, nor is it on a desired node per the previous check. undesiredAllocationsExcludingShuttingDownNodes++; } + if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) { + // Rebalancing is disabled, we're just here to collect the AllocationStats to return. 
+ continue; + } + if (allocation.deciders().canRebalance(shardRouting, allocation).type() != Decision.Type.YES) { // rebalancing disabled for this shard continue; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java index 62089856d783..7fae18a332f0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDecider.java @@ -75,9 +75,9 @@ public abstract class AllocationDecider { } /** - * Returns a {@link Decision} whether the cluster can execute - * re-balanced operations at all. - * {@link Decision#ALWAYS}. + * Returns a {@link Decision} on whether the cluster is allowed to rebalance shards to improve relative node shard weights and + * performance. + * @return {@link Decision#ALWAYS} is returned by default if not overridden. */ public Decision canRebalance(RoutingAllocation allocation) { return Decision.ALWAYS; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java index dd4b66335207..c80aa1e69f21 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AllocationDeciders.java @@ -82,6 +82,10 @@ public class AllocationDeciders { ); } + /** + * Returns whether rebalancing (move shards to improve relative node weights and performance) is allowed right now. + * Rebalancing can be disabled via cluster settings, or throttled by cluster settings (e.g. max concurrent shard moves). 
+ */ public Decision canRebalance(RoutingAllocation allocation) { return withDeciders( allocation, diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index 7d866983496f..3cf012d3faa3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -150,6 +150,10 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { + "]" ); + /** + * Rebalancing may be enabled, disabled, or only allowed after all primaries have started, depending on the cluster setting + * {@link #CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING}. + */ @SuppressWarnings("fallthrough") @Override public Decision canRebalance(RoutingAllocation allocation) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index 6c457267a5ff..deb3e4440f4a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -61,6 +61,11 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { return canRebalance(allocation); } + /** + * We allow a limited number of concurrent shard relocations, per the cluster setting + * {@link #CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING}. + * Returns a {@link Decision#THROTTLE} decision if the limit is exceeded, otherwise returns {@link Decision#YES}. 
+ */ @Override public Decision canRebalance(RoutingAllocation allocation) { int relocatingShards = allocation.routingNodes().getRelocatingShardCount(); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index d3ba31eae3e4..e539fa23c5f5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -147,6 +147,11 @@ public class EnableAllocationDecider extends AllocationDecider { }; } + /** + * Rebalancing is limited by the {@link Rebalance} value set on the cluster setting {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING}. + * We might allow movement only of primary shards, or replica shards, or none, or all. + * This method only concerns itself with whether {@link Rebalance#NONE} is set: rebalancing is allowed for all other setting values. + */ @Override public Decision canRebalance(RoutingAllocation allocation) { if (allocation.ignoreDisable()) { @@ -246,7 +251,7 @@ public class EnableAllocationDecider extends AllocationDecider { } /** - * Rebalance values or rather their string representation to be used used with + * Rebalance values or rather their string representation to be used with * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} / * {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE_SETTING} * via cluster / index settings. 
diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 015614c04198..a4708f93335c 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; +import org.elasticsearch.index.mapper.MappingParserContext; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; @@ -90,7 +91,7 @@ public enum IndexMode { } @Override - public MetadataFieldMapper timeSeriesIdFieldMapper() { + public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) { // non time-series indices must not have a TimeSeriesIdFieldMapper return null; } @@ -187,8 +188,8 @@ public enum IndexMode { } @Override - public MetadataFieldMapper timeSeriesIdFieldMapper() { - return TimeSeriesIdFieldMapper.INSTANCE; + public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) { + return TimeSeriesIdFieldMapper.getInstance(c); } @Override @@ -277,7 +278,7 @@ public enum IndexMode { } @Override - public MetadataFieldMapper timeSeriesIdFieldMapper() { + public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) { // non time-series indices must not have a TimeSeriesIdFieldMapper return null; } @@ -348,7 +349,7 @@ public enum IndexMode { } @Override - public MetadataFieldMapper timeSeriesIdFieldMapper() { + public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) { // non time-series indices must not have a TimeSeriesIdFieldMapper return null; } @@ -518,7 +519,7 @@ public enum IndexMode { * the _tsid field. 
The field mapper will be added to the list of the metadata * field mappers for the index. */ - public abstract MetadataFieldMapper timeSeriesIdFieldMapper(); + public abstract MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c); /** * Return an instance of the {@link TimeSeriesRoutingHashFieldMapper} that generates diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 2d1c80fe4c44..6152f50fe297 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -147,6 +147,7 @@ public class IndexVersions { public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0); public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT = def(9_010_00_0, Version.LUCENE_10_1_0); public static final IndexVersion TIMESTAMP_DOC_VALUES_SPARSE_INDEX = def(9_011_0_00, Version.LUCENE_10_1_0); + public static final IndexVersion TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX = def(9_012_0_00, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 3325ad4e96eb..1230fb2bb431 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -46,17 +46,36 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper { public static final String NAME = "_tsid"; public static final String CONTENT_TYPE = "_tsid"; public static final TimeSeriesIdFieldType FIELD_TYPE = new TimeSeriesIdFieldType(); - public static final TimeSeriesIdFieldMapper INSTANCE = new TimeSeriesIdFieldMapper(); + + private static final TimeSeriesIdFieldMapper INSTANCE_WITHOUT_SKIPPER = new TimeSeriesIdFieldMapper(false); + private static final TimeSeriesIdFieldMapper INSTANCE_WITH_SKIPPER = new TimeSeriesIdFieldMapper(true); + + public static TimeSeriesIdFieldMapper getInstance(boolean useDocValuesSkipper) { + if (useDocValuesSkipper) { + return INSTANCE_WITH_SKIPPER; + } else { + return INSTANCE_WITHOUT_SKIPPER; + } + } + + public static TimeSeriesIdFieldMapper getInstance(MappingParserContext context) { + boolean useDocValuesSkipper = context.indexVersionCreated().onOrAfter(IndexVersions.TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX) + && context.getIndexSettings().useDocValuesSkipper(); + return TimeSeriesIdFieldMapper.getInstance(useDocValuesSkipper); + } @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder().init(this); + return new Builder(this.useDocValuesSkipper).init(this); } public static class Builder extends MetadataFieldMapper.Builder { - protected Builder() { + private final boolean useDocValuesSkipper; + + protected Builder(boolean useDocValuesSkipper) { super(NAME); + this.useDocValuesSkipper = 
useDocValuesSkipper; } @Override @@ -66,11 +85,11 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper { @Override public TimeSeriesIdFieldMapper build() { - return INSTANCE; + return TimeSeriesIdFieldMapper.getInstance(useDocValuesSkipper); } } - public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesIdFieldMapper()); + public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesIdFieldMapper(c)); public static final class TimeSeriesIdFieldType extends MappedFieldType { private TimeSeriesIdFieldType() { @@ -115,8 +134,11 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper { } } - private TimeSeriesIdFieldMapper() { + private final boolean useDocValuesSkipper; + + private TimeSeriesIdFieldMapper(boolean useDocValuesSkipper) { super(FIELD_TYPE); + this.useDocValuesSkipper = useDocValuesSkipper; } @Override @@ -135,7 +157,12 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper { } else { timeSeriesId = routingPathFields.buildHash().toBytesRef(); } - context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId)); + + if (this.useDocValuesSkipper) { + context.doc().add(SortedDocValuesField.indexedField(fieldType().name(), timeSeriesId)); + } else { + context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId)); + } BytesRef uidEncoded = TsidExtractingIdFieldMapper.createField( context, diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java index 0a1fec073913..f2ca22937c80 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconcilerTests.java @@ -112,6 
+112,8 @@ import static org.mockito.Mockito.when; public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { + private static AtomicReference ALLOCATION_STATS_PLACEHOLDER = new AtomicReference<>(); + public void testNoChangesOnEmptyDesiredBalance() { final var clusterState = DesiredBalanceComputerTests.createInitialClusterState(3); final var routingAllocation = createRoutingAllocationFrom(clusterState); @@ -252,8 +254,9 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { (indexName, nodeId) -> indexName.equals("index-0") && nodeId.equals("node-0") ); + AtomicReference allocationStats = new AtomicReference<>(); final var allocationService = createTestAllocationService( - routingAllocation -> reconcile(routingAllocation, desiredBalance), + routingAllocation -> reconcile(routingAllocation, desiredBalance, allocationStats), new SameShardAllocationDecider(clusterSettings), new ReplicaAfterPrimaryActiveAllocationDecider(), new ThrottlingAllocationDecider(clusterSettings), @@ -277,6 +280,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { final var index1RoutingTable = stateWithStartedPrimary.routingTable(project1).shardRoutingTable("index-1", 0); assertTrue(index1RoutingTable.primaryShard().unassigned()); assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned)); + assertNotNull(allocationStats.get()); + assertEquals(new DesiredBalanceMetrics.AllocationStats(3, 1, 0), allocationStats.get()); } // now relax the filter so that the replica of index-0 and the primary of index-1 can both be assigned to node-1, but the throttle @@ -290,6 +295,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { final var index1RoutingTable = stateWithInitializingSecondPrimary.routingTable(project1).shardRoutingTable("index-1", 0); assertTrue(index1RoutingTable.primaryShard().initializing()); 
assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned)); + assertNotNull(allocationStats.get()); + assertEquals(new DesiredBalanceMetrics.AllocationStats(2, 2, 0), allocationStats.get()); } final var stateWithStartedPrimariesAndInitializingReplica = startInitializingShardsAndReroute( @@ -305,6 +312,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { .shardRoutingTable("index-1", 0); assertTrue(index1RoutingTable.primaryShard().started()); assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned)); + assertNotNull(allocationStats.get()); + assertEquals(new DesiredBalanceMetrics.AllocationStats(1, 3, 0), allocationStats.get()); } } @@ -834,6 +843,9 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { } public void testMoveShards() { + /** + * Set up 4 nodes and an index of 3 shards with 1 replica each (6 shard copies). + */ final var discoveryNodes = discoveryNodes(4); final var metadata = Metadata.builder(); final var routingTable = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); @@ -858,11 +870,13 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { .build(); final var clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + // Set up overriding AllocationDecider#canAllocate decisions for a shard. 
final var canAllocateRef = new AtomicReference<>(Decision.YES); final var desiredBalance = new AtomicReference<>(desiredBalance(clusterState, (shardId, nodeId) -> true)); + AtomicReference allocationStats = new AtomicReference<>(); final var allocationService = createTestAllocationService( - routingAllocation -> reconcile(routingAllocation, desiredBalance.get()), + routingAllocation -> reconcile(routingAllocation, desiredBalance.get(), allocationStats), new SameShardAllocationDecider(clusterSettings), new ReplicaAfterPrimaryActiveAllocationDecider(), new ThrottlingAllocationDecider(clusterSettings), @@ -892,7 +906,10 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { assertTrue(shardRouting.started()); assertThat(shardRouting.currentNodeId(), oneOf("node-0", "node-1")); } + assertNotNull(allocationStats); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get()); + // Only allow allocation on two of the nodes, excluding the other two nodes. 
clusterSettings.applySettings( Settings.builder() .putList( @@ -905,6 +922,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // all still on desired nodes, no // movement needed + assertNotNull(allocationStats); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get()); desiredBalance.set(desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-2") || nodeId.equals("node-3"))); @@ -913,10 +932,14 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { final var reroutedState = allocationService.reroute(clusterState, "test", ActionListener.noop()); assertThat(reroutedState.getRoutingNodes().node("node-0").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1)); assertThat(reroutedState.getRoutingNodes().node("node-1").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1)); + assertNotNull(allocationStats); + // Total allocations counts relocating and initializing shards, so the two relocating shards will be counted twice.
+ assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 8, 4), allocationStats.get()); // Ensuring that we check the shortcut two-param canAllocate() method up front canAllocateRef.set(Decision.NO); assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get()); canAllocateRef.set(Decision.YES); // Restore filter to default @@ -954,6 +977,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { "test", ActionListener.noop() ); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 7, 3), allocationStats.get()); + assertThat(shuttingDownState.getRoutingNodes().node("node-2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1)); } @@ -982,11 +1007,13 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { final var desiredBalance = new AtomicReference<>( desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-0") || nodeId.equals("node-1")) ); + AtomicReference allocationStats = new AtomicReference<>(); final var allocationService = createTestAllocationService( - routingAllocation -> reconcile(routingAllocation, desiredBalance.get()), + routingAllocation -> reconcile(routingAllocation, desiredBalance.get(), allocationStats), new SameShardAllocationDecider(clusterSettings), new ReplicaAfterPrimaryActiveAllocationDecider(), new ThrottlingAllocationDecider(clusterSettings), + new ConcurrentRebalanceAllocationDecider(clusterSettings), new AllocationDecider() { @Override public Decision canRebalance(RoutingAllocation allocation) { @@ -1016,24 +1043,28 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { assertThat(shardRouting.currentNodeId(), oneOf("node-0", "node-1")); } - assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // all still on desired nodes, no - // movement needed + // All still on 
desired nodes, no movement needed, cluster state remains the same. + assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get()); desiredBalance.set(desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-2") || nodeId.equals("node-3"))); canRebalanceGlobalRef.set(Decision.NO); - assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // rebalancing forbidden on all - // shards, no movement + // rebalancing forbidden on all shards, no movement allowed, cluster state remains the same. + assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); + // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get()); canRebalanceGlobalRef.set(Decision.YES); canRebalanceShardRef.set(Decision.NO); - assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // rebalancing forbidden on - // specific shards, no movement + // rebalancing forbidden on specific shards, still no movement. + assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); + // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get()); canRebalanceShardRef.set(Decision.YES); canAllocateShardRef.set(Decision.NO); - assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // allocation not possible, no - // movement + // allocation not possible, no movement + assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); + // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get()); canAllocateShardRef.set(Decision.YES); // The next reroute starts moving shards to node-2 and node-3, but interleaves the decisions between node-0 and node-1 for fairness. 
@@ -1041,6 +1072,16 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { final var reroutedState = allocationService.reroute(clusterState, "test", ActionListener.noop()); assertThat(reroutedState.getRoutingNodes().node("node-0").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1)); assertThat(reroutedState.getRoutingNodes().node("node-1").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1)); + assertNotNull(allocationStats.get()); + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get()); + + // Test that the AllocationStats are still updated, even though throttling is active. The cluster state should remain unchanged + // because, due to throttling, the previous reroute request started relocating two shards and, since those reallocations have not + // been completed, no additional shard relocations can begin. + assertSame(reroutedState, allocationService.reroute(reroutedState, "test", ActionListener.noop())); + assertNotNull(allocationStats); + // Note: total allocations counts relocating and initializing shards, so the two relocating shards will be counted twice.
+ assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 8, 4), allocationStats.get()); } public void testDoNotRebalanceToTheNodeThatNoLongerExists() { @@ -1245,12 +1286,14 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { while (true) { var allocation = createRoutingAllocationFrom(clusterState, deciders); - reconciler.reconcile(balance, allocation); + var allocationStats = reconciler.reconcile(balance, allocation); var initializing = shardsWithState(allocation.routingNodes(), ShardRoutingState.INITIALIZING); if (initializing.isEmpty()) { + assertEquals(new DesiredBalanceMetrics.AllocationStats(0, shardsPerNode * numberOfNodes, 0), allocationStats); break; } + for (ShardRouting shardRouting : initializing) { totalOutgoingMoves.get(shardRouting.relocatingNodeId()).incrementAndGet(); allocation.routingNodes().startShard(shardRouting, allocation.changes(), 0L); @@ -1366,11 +1409,24 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { } private static void reconcile(RoutingAllocation routingAllocation, DesiredBalance desiredBalance) { - final var threadPool = mock(ThreadPool.class); - when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet); - new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool).reconcile(desiredBalance, routingAllocation); + reconcile(routingAllocation, desiredBalance, ALLOCATION_STATS_PLACEHOLDER); } + private static void reconcile( + RoutingAllocation routingAllocation, + DesiredBalance desiredBalance, + AtomicReference allocationStatsAtomicReference + ) { + final var threadPool = mock(ThreadPool.class); + when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet); + allocationStatsAtomicReference.set( + new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool).reconcile(desiredBalance, routingAllocation) + ); + } + + /** + * Returns whether the node's shards are all desired assignments. 
+ */ private static boolean isReconciled(RoutingNode node, DesiredBalance balance) { for (ShardRouting shardRouting : node) { if (balance.assignments().get(shardRouting.shardId()).nodeIds().contains(node.nodeId()) == false) { @@ -1512,6 +1568,10 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase { .build(); } + /** + * Settings that limit concurrency on each node to: a single primary shard recovery from local disk; a single shard move as a source + * node; a single shard move as the destination node. + */ private static Settings throttleSettings() { return Settings.builder() .put(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 1) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index 857f497b2908..6360ead8126f 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -13,13 +13,36 @@ apply plugin: 'com.gradleup.shadow' import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar +def patched = Attribute.of('patched', Boolean) configurations { - hdfs2 - hdfs3 + hdfs2 { + attributes { + attribute(patched, true) + } + } + hdfs3 { + attributes { + attribute(patched, true) + } + } consumable("shadowedHdfs2") } dependencies { + attributesSchema { + attribute(patched) + } + artifactTypes.getByName("jar") { + attributes.attribute(patched, false) + } + registerTransform(org.elasticsearch.gradle.internal.dependencies.patches.hdfs.HdfsClassPatcher) { + from.attribute(patched, false) + to.attribute(patched, true) + parameters { + matchingArtifacts = ["hadoop-common"] + } + } + compileOnly("org.apache.hadoop:hadoop-minicluster:2.8.5") api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}") { transitive = false diff --git a/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml index 
34bac2441161..c24a99d12136 100644 --- a/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,5 @@ org.elasticsearch.blobcache: - files: - - relative_path: "shared_snapshot_cache" - relative_to: "data" - mode: "read_write" + - relative_path: "" + relative_to: data + mode: read_write diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java index 32650a6d6c4a..b834e86955f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java @@ -49,10 +49,15 @@ public class DeprecatedIndexPredicate { */ public static boolean reindexRequired(IndexMetadata indexMetadata, boolean filterToBlockedStatus) { return creationVersionBeforeMinimumWritableVersion(indexMetadata) + && isNotSystem(indexMetadata) && isNotSearchableSnapshot(indexMetadata) && matchBlockedStatus(indexMetadata, filterToBlockedStatus); } + private static boolean isNotSystem(IndexMetadata indexMetadata) { + return indexMetadata.isSystem() == false; + } + private static boolean isNotSearchableSnapshot(IndexMetadata indexMetadata) { return indexMetadata.isSearchableSnapshot() == false; } diff --git a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml index a069e1b4ce4c..f543e39972a7 100644 --- a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,3 +1,13 @@ +org.elasticsearch.xcore: + - files: + - relative_path: "" + relative_to: config + mode: read +org.elasticsearch.sslconfig: + - files: + - 
relative_path: "" + relative_to: config + mode: read org.apache.httpcomponents.httpclient: - outbound_network # For SamlRealm - manage_threads diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java index 48cd4294cab7..3aa587d3cc6e 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecker.java @@ -72,12 +72,14 @@ public class DataStreamDeprecationChecker implements ResourceDeprecationChecker Map> dataStreamIssues = new HashMap<>(); for (String dataStreamName : dataStreamNames) { DataStream dataStream = clusterState.metadata().getProject().dataStreams().get(dataStreamName); - List issuesForSingleDataStream = DATA_STREAM_CHECKS.stream() - .map(c -> c.apply(dataStream, clusterState)) - .filter(Objects::nonNull) - .toList(); - if (issuesForSingleDataStream.isEmpty() == false) { - dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); + if (dataStream.isSystem() == false) { + List issuesForSingleDataStream = DATA_STREAM_CHECKS.stream() + .map(c -> c.apply(dataStream, clusterState)) + .filter(Objects::nonNull) + .toList(); + if (issuesForSingleDataStream.isEmpty() == false) { + dataStreamIssues.put(dataStreamName, issuesForSingleDataStream); + } } } return dataStreamIssues.isEmpty() ? 
Map.of() : dataStreamIssues; diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java index a8dd1d464e30..e0dfaef605af 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationCheckerTests.java @@ -302,4 +302,52 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase { assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected))); } + public void testOldSystemDataStreamIgnored() { + // We do not want system data streams coming back in the deprecation info API + int oldIndexCount = randomIntBetween(1, 100); + int newIndexCount = randomIntBetween(1, 100); + List allIndices = new ArrayList<>(); + Map nameToIndexMetadata = new HashMap<>(); + for (int i = 0; i < oldIndexCount; i++) { + Settings.Builder settings = settings(IndexVersion.fromId(7170099)); + + String indexName = "old-data-stream-index-" + i; + settings.put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + + IndexMetadata oldIndexMetadata = IndexMetadata.builder(indexName) + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + allIndices.add(oldIndexMetadata.getIndex()); + nameToIndexMetadata.put(oldIndexMetadata.getIndex().getName(), oldIndexMetadata); + } + for (int i = 0; i < newIndexCount; i++) { + Index newIndex = createNewIndex(i, false, nameToIndexMetadata); + allIndices.add(newIndex); + } + DataStream dataStream = new DataStream( + randomAlphaOfLength(10), + allIndices, + randomNegativeLong(), + Map.of(), + true, + false, + true, + randomBoolean(), + randomFrom(IndexMode.values()), + null, + randomFrom(DataStreamOptions.EMPTY, 
DataStreamOptions.FAILURE_STORE_DISABLED, DataStreamOptions.FAILURE_STORE_ENABLED, null), + List.of(), + randomBoolean(), + null + ); + Metadata metadata = Metadata.builder() + .indices(nameToIndexMetadata) + .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of()) + .build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + assertThat(checker.check(clusterState), equalTo(Map.of())); + } + } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java index c940f50a2e17..3c1b243be64c 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationCheckerTests.java @@ -298,6 +298,28 @@ public class IndexDeprecationCheckerTests extends ESTestCase { assertEquals(List.of(expected), issuesByIndex.get("test")); } + public void testOldSystemIndicesIgnored() { + // We do not want system indices coming back in the deprecation info API + Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .system(true) + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(indexMetdataState) + .build(); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .blocks(clusterBlocksForIndices(indexMetadata)) + .build(); + Map> issuesByIndex = checker.check( + clusterState, + new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS), + emptyPrecomputedData + ); + assertThat(issuesByIndex, equalTo(Map.of())); + } + private IndexMetadata readonlyIndexMetadata(String indexName, 
IndexVersion indexVersion) { Settings.Builder settings = settings(indexVersion).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true); return IndexMetadata.builder(indexName).settings(settings).numberOfShards(1).numberOfReplicas(0).state(indexMetdataState).build(); diff --git a/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml index ee6094d2ffef..c5e32cf27b1e 100644 --- a/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml @@ -3,3 +3,7 @@ ALL-UNNAMED: - write_system_properties: properties: - org.apache.xml.security.ignoreLineBreaks + - files: + - relative_path: "" + relative_to: config + mode: read diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index fee66a9f84ac..737c549255a7 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -54,6 +54,7 @@ import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticI import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel; import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings; import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; +import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder; import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.ArrayList; @@ -557,11 +558,8 @@ 
public class ElasticInferenceService extends SenderService { @Override public void checkModelConfig(Model model, ActionListener listener) { - if (model instanceof ElasticInferenceServiceSparseEmbeddingsModel embeddingsModel) { - listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel)); - } else { - listener.onResponse(model); - } + // TODO: Remove this function once all services have been updated to use the new model validators + ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener); } private static List translateToChunkedResults(InferenceInputs inputs, InferenceServiceResults inferenceResults) { @@ -576,18 +574,6 @@ public class ElasticInferenceService extends SenderService { } } - private ElasticInferenceServiceSparseEmbeddingsModel updateModelWithEmbeddingDetails( - ElasticInferenceServiceSparseEmbeddingsModel model - ) { - ElasticInferenceServiceSparseEmbeddingsServiceSettings serviceSettings = new ElasticInferenceServiceSparseEmbeddingsServiceSettings( - model.getServiceSettings().modelId(), - model.getServiceSettings().maxInputTokens(), - model.getServiceSettings().rateLimitSettings() - ); - - return new ElasticInferenceServiceSparseEmbeddingsModel(model, serviceSettings); - } - private TraceContext getCurrentTraceInfo() { var threadPool = getServiceComponents().threadPool(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java index 293ca1bcb41c..f062a57d03f8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/completion/ElasticInferenceServiceCompletionServiceSettings.java @@ -35,7 +35,6 @@ public class ElasticInferenceServiceCompletionServiceSettings extends FilteredXC public static final String NAME = "elastic_inference_service_completion_service_settings"; - // TODO what value do we put here? private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(720L); public static ElasticInferenceServiceCompletionServiceSettings fromMap(Map map, ConfigurationParseContext context) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 414c2a3f943d..5d98a90ec2bf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -317,7 +317,21 @@ public class ElasticInferenceServiceTests extends ESTestCase { public void testCheckModelConfig_ReturnsNewModelReference() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = createService(senderFactory, getUrl(webServer))) { + String responseJson = """ + { + "data": [ + { + "hello": 2.1259406, + "greet": 1.7073475 + } + ] + } + """; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id"); PlainActionFuture listener = new PlainActionFuture<>(); service.checkModelConfig(model, listener); diff --git 
a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/GetMigrationReindexStatusTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/GetMigrationReindexStatusTransportAction.java index f5e869e50580..39d604b7a206 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/GetMigrationReindexStatusTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/GetMigrationReindexStatusTransportAction.java @@ -174,7 +174,7 @@ public class GetMigrationReindexStatusTransportAction extends HandledTransportAc if (sourceIndexStats == null) { totalDocsInIndex = 0; } else { - DocsStats totalDocsStats = sourceIndexStats.getTotal().getDocs(); + DocsStats totalDocsStats = sourceIndexStats.getPrimaries().getDocs(); totalDocsInIndex = totalDocsStats == null ? 0 : totalDocsStats.getCount(); } IndexStats migratedIndexStats = indicesStatsResponse.getIndex( @@ -184,7 +184,7 @@ public class GetMigrationReindexStatusTransportAction extends HandledTransportAc if (migratedIndexStats == null) { reindexedDocsInIndex = 0; } else { - DocsStats reindexedDocsStats = migratedIndexStats.getTotal().getDocs(); + DocsStats reindexedDocsStats = migratedIndexStats.getPrimaries().getDocs(); reindexedDocsInIndex = reindexedDocsStats == null ? 
0 : reindexedDocsStats.getCount(); } inProgressMap.put(index, Tuple.tuple(totalDocsInIndex, reindexedDocsInIndex)); diff --git a/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml index ff8f2a8f73ea..664b7d83315d 100644 --- a/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/ml/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,9 @@ org.elasticsearch.ml: - manage_threads + - files: + - relative_path: mlmodel.conf + relative_to: config + mode: read + - relative_path: "ml-local-data/" + relative_to: data + mode: read_write diff --git a/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..69eead670711 --- /dev/null +++ b/x-pack/plugin/searchable-snapshots/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,8 @@ +org.elasticsearch.searchablesnapshots: + - files: + - relative_path: snapshot_cache + relative_to: data + mode: read_write + - relative_path: indices + relative_to: data + mode: read_write diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml index 1897e826313a..99dd7d5c1380 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,5 +1,9 @@ org.elasticsearch.security: - set_https_connection_properties # for CommandLineHttpClient + - files: + - relative_path: "" + relative_to: config + mode: read io.netty.transport: - manage_threads - inbound_network @@ -13,6 +17,8 @@ io.netty.common: mode: "read" - path: "/usr/lib/os-release" mode: "read" + - path: "/proc/sys/net/core/somaxconn" + mode: read org.opensaml.xmlsec.impl: - write_system_properties: properties: