mirror of https://github.com/elastic/elasticsearch.git (synced 2025-06-28 09:28:55 -04:00)

Merge main into multi-project

This commit is contained in:
commit 4f918a81b9

70 changed files with 2418 additions and 498 deletions
@@ -0,0 +1,147 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;

import org.gradle.api.artifacts.transform.CacheableTransform;
import org.gradle.api.artifacts.transform.InputArtifact;
import org.gradle.api.artifacts.transform.TransformAction;
import org.gradle.api.artifacts.transform.TransformOutputs;
import org.gradle.api.artifacts.transform.TransformParameters;
import org.gradle.api.file.FileSystemLocation;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Classpath;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.Optional;
import org.jetbrains.annotations.NotNull;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.regex.Pattern;

import static java.util.Map.entry;

@CacheableTransform
public abstract class HdfsClassPatcher implements TransformAction<HdfsClassPatcher.Parameters> {

    record JarPatchers(String artifactTag, Pattern artifactPattern, Map<String, Function<ClassWriter, ClassVisitor>> jarPatchers) {}

    static final List<JarPatchers> allPatchers = List.of(
        new JarPatchers(
            "hadoop-common",
            Pattern.compile("hadoop-common-(?!.*tests)"),
            Map.ofEntries(
                entry("org/apache/hadoop/util/ShutdownHookManager.class", ShutdownHookManagerPatcher::new),
                entry("org/apache/hadoop/util/Shell.class", ShellPatcher::new),
                entry("org/apache/hadoop/security/UserGroupInformation.class", SubjectGetSubjectPatcher::new)
            )
        ),
        new JarPatchers(
            "hadoop-client-api",
            Pattern.compile("hadoop-client-api.*"),
            Map.ofEntries(
                entry("org/apache/hadoop/util/ShutdownHookManager.class", ShutdownHookManagerPatcher::new),
                entry("org/apache/hadoop/util/Shell.class", ShellPatcher::new),
                entry("org/apache/hadoop/security/UserGroupInformation.class", SubjectGetSubjectPatcher::new),
                entry("org/apache/hadoop/security/authentication/client/KerberosAuthenticator.class", SubjectGetSubjectPatcher::new)
            )
        )
    );

    interface Parameters extends TransformParameters {
        @Input
        @Optional
        List<String> getMatchingArtifacts();

        void setMatchingArtifacts(List<String> matchingArtifacts);
    }

    @Classpath
    @InputArtifact
    public abstract Provider<FileSystemLocation> getInputArtifact();

    @Override
    public void transform(@NotNull TransformOutputs outputs) {
        File inputFile = getInputArtifact().get().getAsFile();

        List<String> matchingArtifacts = getParameters().getMatchingArtifacts();
        List<JarPatchers> patchersToApply = allPatchers.stream()
            .filter(jp -> matchingArtifacts.contains(jp.artifactTag()) && jp.artifactPattern().matcher(inputFile.getName()).find())
            .toList();
        if (patchersToApply.isEmpty()) {
            outputs.file(getInputArtifact());
        } else {
            patchersToApply.forEach(patchers -> {
                System.out.println("Patching " + inputFile.getName());

                Map<String, Function<ClassWriter, ClassVisitor>> jarPatchers = new HashMap<>(patchers.jarPatchers());
                File outputFile = outputs.file(inputFile.getName().replace(".jar", "-patched.jar"));

                patchJar(inputFile, outputFile, jarPatchers);

                if (jarPatchers.isEmpty() == false) {
                    throw new IllegalArgumentException(
                        String.format(
                            Locale.ROOT,
                            "error patching [%s] with [%s]: the jar does not contain [%s]",
                            inputFile.getName(),
                            patchers.artifactPattern().toString(),
                            String.join(", ", jarPatchers.keySet())
                        )
                    );
                }
            });
        }
    }

    private static void patchJar(File inputFile, File outputFile, Map<String, Function<ClassWriter, ClassVisitor>> jarPatchers) {
        try (JarFile jarFile = new JarFile(inputFile); JarOutputStream jos = new JarOutputStream(new FileOutputStream(outputFile))) {
            Enumeration<JarEntry> entries = jarFile.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                String entryName = entry.getName();
                // Add the entry to the new JAR file
                jos.putNextEntry(new JarEntry(entryName));

                Function<ClassWriter, ClassVisitor> classPatcher = jarPatchers.remove(entryName);
                if (classPatcher != null) {
                    byte[] classToPatch = jarFile.getInputStream(entry).readAllBytes();

                    ClassReader classReader = new ClassReader(classToPatch);
                    ClassWriter classWriter = new ClassWriter(classReader, 0);
                    classReader.accept(classPatcher.apply(classWriter), 0);

                    jos.write(classWriter.toByteArray());
                } else {
                    // Read the entry's data and write it to the new JAR
                    try (InputStream is = jarFile.getInputStream(entry)) {
                        is.transferTo(jos);
                    }
                }
                jos.closeEntry();
            }
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        }
    }
}
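For orientation, a transform like this only takes effect once it is registered against the project's dependency handler. The snippet below is a minimal sketch of that wiring in plugin code, not the actual Elasticsearch build logic; the plugin class name, the "patched-hdfs-jar" artifact type, and the chosen matchingArtifacts values are illustrative assumptions.

import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;

import java.util.List;

// Hypothetical wiring; assumes this class lives in the same package as HdfsClassPatcher,
// since its Parameters interface is package-private.
public class HdfsPatcherRegistrationPlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        project.getDependencies().registerTransform(HdfsClassPatcher.class, spec -> {
            // Consume plain jars and produce a distinct (assumed) artifact type for the patched variant.
            spec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.JAR_TYPE);
            spec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, "patched-hdfs-jar");
            // Only jars whose tag appears here (and whose file name matches that tag's pattern) are rewritten;
            // everything else is passed through untouched, since transform() re-emits the input artifact.
            spec.parameters(params -> params.setMatchingArtifacts(List.of("hadoop-common", "hadoop-client-api")));
        });
    }
}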
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.hdfs.patch;
+package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
 
 import org.objectweb.asm.MethodVisitor;
 import org.objectweb.asm.Opcodes;
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.hdfs.patch;
+package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
 
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.hdfs.patch;
+package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
 
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
@@ -7,7 +7,7 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-package org.elasticsearch.hdfs.patch;
+package org.elasticsearch.gradle.internal.dependencies.patches.hdfs;
 
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
@@ -180,7 +180,7 @@ final class SystemJvmOptions {
         }
         // We instrument classes in these modules to call the bridge. Because the bridge gets patched
         // into java.base, we must export the bridge from java.base to these modules, as a comma-separated list
-        String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming";
+        String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming,jdk.net";
         return Stream.of(
             "-Des.entitlements.enabled=true",
             "-XX:+EnableDynamicAgentLoading",
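To make the comment in the hunk above concrete: the comma-separated module list ends up as the target side of an --add-exports JVM flag so that the instrumented modules can see the bridge package that gets patched into java.base. The sketch below is purely illustrative and is not the flag-assembly code from SystemJvmOptions; it assumes the bridge package name org.elasticsearch.entitlement.bridge.

// Hypothetical sketch of how such a flag could be assembled from the module list.
class AddExportsExample {
    static String bridgeExportFlag() {
        String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming,jdk.net";
        // --add-exports=<source-module>/<package>=<target-module>[,<target-module>...]
        return "--add-exports=java.base/org.elasticsearch.entitlement.bridge=" + modulesContainingEntitlementInstrumentation;
    }
}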
docs/changelog/117642.yaml (new file)
@@ -0,0 +1,5 @@
pr: 117642
summary: Adding endpoint creation validation to `ElasticInferenceService`
area: Machine Learning
type: enhancement
issues: []
docs/changelog/122458.yaml (new file)
@@ -0,0 +1,5 @@
pr: 122458
summary: '`DesiredBalanceReconciler` always returns `AllocationStats`'
area: Allocation
type: bug
issues: []
docs/changelog/122951.yaml (new file)
@@ -0,0 +1,6 @@
pr: 122951
summary: Updates the deprecation info API to not warn about system indices and data
  streams
area: Indices APIs
type: bug
issues: []
@@ -12,6 +12,7 @@ apply plugin: 'elasticsearch.build'
 dependencies {
   compileOnly project(':libs:entitlement')
   compileOnly project(':libs:core')
+  compileOnly project(':libs:logging')
   implementation 'org.ow2.asm:asm:9.7.1'
   testImplementation project(":test:framework")
   testImplementation project(":libs:entitlement:bridge")
@@ -15,6 +15,7 @@ module org.elasticsearch.entitlement.instrumentation {
     requires org.elasticsearch.entitlement;
 
     requires static org.elasticsearch.base; // for SuppressForbidden
+    requires org.elasticsearch.logging;
 
     provides InstrumentationService with InstrumentationServiceImpl;
 }
@@ -12,6 +12,8 @@ package org.elasticsearch.entitlement.instrumentation.impl;
 import org.elasticsearch.entitlement.instrumentation.CheckMethod;
 import org.elasticsearch.entitlement.instrumentation.Instrumenter;
 import org.elasticsearch.entitlement.instrumentation.MethodKey;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
 import org.objectweb.asm.AnnotationVisitor;
 import org.objectweb.asm.ClassReader;
 import org.objectweb.asm.ClassVisitor;
@@ -36,6 +38,7 @@ import static org.objectweb.asm.Opcodes.INVOKESTATIC;
 import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
 
 public class InstrumenterImpl implements Instrumenter {
+    private static final Logger logger = LogManager.getLogger(InstrumenterImpl.class);
 
     private final String getCheckerClassMethodDescriptor;
     private final String handleClass;
@@ -155,10 +158,10 @@ public class InstrumenterImpl implements Instrumenter {
             var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList());
             var instrumentationMethod = checkMethods.get(key);
             if (instrumentationMethod != null) {
-                // System.out.println("Will instrument method " + key);
+                logger.debug("Will instrument {}", key);
                 return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod);
             } else {
-                // System.out.println("Will not instrument method " + key);
+                logger.trace("Will not instrument {}", key);
             }
         }
         return mv;
@@ -11,6 +11,7 @@
 // At build and run time, the bridge is patched into the java.base module.
 module org.elasticsearch.entitlement.bridge {
     requires java.net.http;
+    requires jdk.net;
 
     exports org.elasticsearch.entitlement.bridge;
 }
@@ -9,11 +9,14 @@
 
 package org.elasticsearch.entitlement.bridge;
 
+import jdk.nio.Channels;
+
 import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileFilter;
 import java.io.FilenameFilter;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.lang.foreign.AddressLayout;
@@ -58,14 +61,22 @@ import java.nio.file.AccessMode;
 import java.nio.file.CopyOption;
 import java.nio.file.DirectoryStream;
 import java.nio.file.FileStore;
+import java.nio.file.FileVisitOption;
+import java.nio.file.FileVisitor;
 import java.nio.file.LinkOption;
 import java.nio.file.OpenOption;
 import java.nio.file.Path;
 import java.nio.file.WatchEvent;
 import java.nio.file.WatchService;
+import java.nio.file.attribute.BasicFileAttributes;
 import java.nio.file.attribute.FileAttribute;
+import java.nio.file.attribute.FileAttributeView;
+import java.nio.file.attribute.FileTime;
+import java.nio.file.attribute.PosixFilePermission;
 import java.nio.file.attribute.UserPrincipal;
 import java.nio.file.spi.FileSystemProvider;
+import java.security.KeyStore;
+import java.security.Provider;
 import java.security.cert.CertStoreParameters;
 import java.util.List;
 import java.util.Locale;
@@ -75,6 +86,7 @@ import java.util.Set;
 import java.util.TimeZone;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.ForkJoinPool;
+import java.util.function.BiPredicate;
 import java.util.function.Consumer;
 
 import javax.net.ssl.HostnameVerifier;
@@ -621,19 +633,255 @@
 
     void check$java_io_RandomAccessFile$(Class<?> callerClass, File file, String mode);
 
+    void check$java_security_KeyStore$$getInstance(Class<?> callerClass, File file, char[] password);
+
+    void check$java_security_KeyStore$$getInstance(Class<?> callerClass, File file, KeyStore.LoadStoreParameter param);
+
+    void check$java_security_KeyStore$Builder$$newInstance(Class<?> callerClass, File file, KeyStore.ProtectionParameter protection);
+
+    void check$java_security_KeyStore$Builder$$newInstance(
+        Class<?> callerClass,
+        String type,
+        Provider provider,
+        File file,
+        KeyStore.ProtectionParameter protection
+    );
+
     void check$java_util_Scanner$(Class<?> callerClass, File source);
 
     void check$java_util_Scanner$(Class<?> callerClass, File source, String charsetName);
 
     void check$java_util_Scanner$(Class<?> callerClass, File source, Charset charset);
 
+    void check$java_util_jar_JarFile$(Class<?> callerClass, String name);
+
+    void check$java_util_jar_JarFile$(Class<?> callerClass, String name, boolean verify);
+
+    void check$java_util_jar_JarFile$(Class<?> callerClass, File file);
+
+    void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify);
+
+    void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify, int mode);
+
+    void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify, int mode, Runtime.Version version);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, String name);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, String name, Charset charset);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, File file);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, int mode);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, Charset charset);
+
+    void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, int mode, Charset charset);
+
     // nio
+    // channels
+    void check$java_nio_channels_FileChannel$(Class<?> callerClass);
+
+    void check$java_nio_channels_FileChannel$$open(
+        Class<?> callerClass,
+        Path path,
+        Set<? extends OpenOption> options,
+        FileAttribute<?>... attrs
+    );
+
+    void check$java_nio_channels_FileChannel$$open(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$java_nio_channels_AsynchronousFileChannel$(Class<?> callerClass);
+
+    void check$java_nio_channels_AsynchronousFileChannel$$open(
+        Class<?> callerClass,
+        Path path,
+        Set<? extends OpenOption> options,
+        ExecutorService executor,
+        FileAttribute<?>... attrs
+    );
+
+    void check$java_nio_channels_AsynchronousFileChannel$$open(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$jdk_nio_Channels$$readWriteSelectableChannel(
+        Class<?> callerClass,
+        FileDescriptor fd,
+        Channels.SelectableChannelCloser closer
+    );
+
+    // files
     void check$java_nio_file_Files$$getOwner(Class<?> callerClass, Path path, LinkOption... options);
 
     void check$java_nio_file_Files$$probeContentType(Class<?> callerClass, Path path);
 
     void check$java_nio_file_Files$$setOwner(Class<?> callerClass, Path path, UserPrincipal principal);
 
+    void check$java_nio_file_Files$$newInputStream(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$java_nio_file_Files$$newOutputStream(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$java_nio_file_Files$$newByteChannel(
+        Class<?> callerClass,
+        Path path,
+        Set<? extends OpenOption> options,
+        FileAttribute<?>... attrs
+    );
+
+    void check$java_nio_file_Files$$newByteChannel(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir);
+
+    void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir, String glob);
+
+    void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir, DirectoryStream.Filter<? super Path> filter);
+
+    void check$java_nio_file_Files$$createFile(Class<?> callerClass, Path path, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createDirectory(Class<?> callerClass, Path dir, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createDirectories(Class<?> callerClass, Path dir, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createTempFile(Class<?> callerClass, Path dir, String prefix, String suffix, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createTempFile(Class<?> callerClass, String prefix, String suffix, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createTempDirectory(Class<?> callerClass, Path dir, String prefix, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createTempDirectory(Class<?> callerClass, String prefix, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createSymbolicLink(Class<?> callerClass, Path link, Path target, FileAttribute<?>... attrs);
+
+    void check$java_nio_file_Files$$createLink(Class<?> callerClass, Path link, Path existing);
+
+    void check$java_nio_file_Files$$delete(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$deleteIfExists(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$copy(Class<?> callerClass, Path source, Path target, CopyOption... options);
+
+    void check$java_nio_file_Files$$move(Class<?> callerClass, Path source, Path target, CopyOption... options);
+
+    void check$java_nio_file_Files$$readSymbolicLink(Class<?> callerClass, Path link);
+
+    void check$java_nio_file_Files$$getFileStore(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$isSameFile(Class<?> callerClass, Path path, Path path2);
+
+    void check$java_nio_file_Files$$mismatch(Class<?> callerClass, Path path, Path path2);
+
+    void check$java_nio_file_Files$$isHidden(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$getFileAttributeView(
+        Class<?> callerClass,
+        Path path,
+        Class<? extends FileAttributeView> type,
+        LinkOption... options
+    );
+
+    void check$java_nio_file_Files$$readAttributes(
+        Class<?> callerClass,
+        Path path,
+        Class<? extends BasicFileAttributes> type,
+        LinkOption... options
+    );
+
+    void check$java_nio_file_Files$$setAttribute(Class<?> callerClass, Path path, String attribute, Object value, LinkOption... options);
+
+    void check$java_nio_file_Files$$getAttribute(Class<?> callerClass, Path path, String attribute, LinkOption... options);
+
+    void check$java_nio_file_Files$$readAttributes(Class<?> callerClass, Path path, String attributes, LinkOption... options);
+
+    void check$java_nio_file_Files$$getPosixFilePermissions(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$setPosixFilePermissions(Class<?> callerClass, Path path, Set<PosixFilePermission> perms);
+
+    void check$java_nio_file_Files$$isSymbolicLink(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$isDirectory(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$isRegularFile(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$getLastModifiedTime(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$setLastModifiedTime(Class<?> callerClass, Path path, FileTime time);
+
+    void check$java_nio_file_Files$$size(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$exists(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$notExists(Class<?> callerClass, Path path, LinkOption... options);
+
+    void check$java_nio_file_Files$$isReadable(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$isWritable(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$isExecutable(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$walkFileTree(
+        Class<?> callerClass,
+        Path start,
+        Set<FileVisitOption> options,
+        int maxDepth,
+        FileVisitor<? super Path> visitor
+    );
+
+    void check$java_nio_file_Files$$walkFileTree(Class<?> callerClass, Path start, FileVisitor<? super Path> visitor);
+
+    void check$java_nio_file_Files$$newBufferedReader(Class<?> callerClass, Path path, Charset cs);
+
+    void check$java_nio_file_Files$$newBufferedReader(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$newBufferedWriter(Class<?> callerClass, Path path, Charset cs, OpenOption... options);
+
+    void check$java_nio_file_Files$$newBufferedWriter(Class<?> callerClass, Path path, OpenOption... options);
+
+    void check$java_nio_file_Files$$copy(Class<?> callerClass, InputStream in, Path target, CopyOption... options);
+
+    void check$java_nio_file_Files$$copy(Class<?> callerClass, Path source, OutputStream out);
+
+    void check$java_nio_file_Files$$readAllBytes(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$readString(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$readString(Class<?> callerClass, Path path, Charset cs);
+
+    void check$java_nio_file_Files$$readAllLines(Class<?> callerClass, Path path, Charset cs);
+
+    void check$java_nio_file_Files$$readAllLines(Class<?> callerClass, Path path);
+
+    void check$java_nio_file_Files$$write(Class<?> callerClass, Path path, byte[] bytes, OpenOption... options);
+
+    void check$java_nio_file_Files$$write(
+        Class<?> callerClass,
+        Path path,
+        Iterable<? extends CharSequence> lines,
+        Charset cs,
+        OpenOption... options
+    );
+
+    void check$java_nio_file_Files$$write(Class<?> callerClass, Path path, Iterable<? extends CharSequence> lines, OpenOption... options);
+
+    void check$java_nio_file_Files$$writeString(Class<?> callerClass, Path path, CharSequence csq, OpenOption... options);
+
+    void check$java_nio_file_Files$$writeString(Class<?> callerClass, Path path, CharSequence csq, Charset cs, OpenOption... options);
+
+    void check$java_nio_file_Files$$list(Class<?> callerClass, Path dir);
+
+    void check$java_nio_file_Files$$walk(Class<?> callerClass, Path start, int maxDepth, FileVisitOption... options);
+
+    void check$java_nio_file_Files$$walk(Class<?> callerClass, Path start, FileVisitOption... options);
+
+    void check$java_nio_file_Files$$find(
+        Class<?> callerClass,
+        Path start,
+        int maxDepth,
+        BiPredicate<Path, BasicFileAttributes> matcher,
+        FileVisitOption... options
+    );
+
+    void check$java_nio_file_Files$$lines(Class<?> callerClass, Path path, Charset cs);
+
+    void check$java_nio_file_Files$$lines(Class<?> callerClass, Path path);
+
     // file system providers
     void check$java_nio_file_spi_FileSystemProvider$(Class<?> callerClass);
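A note on the naming convention used by the methods added above: the dots of the guarded JDK class are replaced by underscores, a trailing single `$` with no method name denotes a constructor check, `$$` introduces a static-method check, the first parameter is always the calling class, and the remaining parameters mirror the guarded signature. The sketch below is conceptual only: the real rewriting is done in bytecode by InstrumenterImpl, and the checker instance is obtained through the bridge rather than passed in as a parameter as shown here.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Conceptual illustration (not the generated bytecode): the prologue that instrumentation injects
// for Files.readAllBytes(Path) consults the checker before the original body runs.
final class InstrumentedReadAllBytesSketch {
    static byte[] readAllBytes(EntitlementChecker checker, Class<?> callerClass, Path path) throws IOException {
        checker.check$java_nio_file_Files$$readAllBytes(callerClass, path); // throws if the caller is not entitled
        return Files.readAllBytes(path);                                    // original behaviour, unchanged
    }
}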
@@ -16,4 +16,5 @@ module org.elasticsearch.entitlement.qa.test {
     // Modules we'll attempt to use in order to exercise entitlements
     requires java.logging;
     requires java.net.http;
+    requires jdk.net;
 }
@@ -9,6 +9,10 @@
 
 package org.elasticsearch.entitlement.qa.test;
 
+import jdk.nio.Channels;
+
+import org.elasticsearch.core.SuppressForbidden;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -24,14 +28,23 @@ import java.net.SocketAddress;
 import java.net.SocketException;
 import java.net.SocketImpl;
 import java.net.URI;
+import java.nio.ByteBuffer;
+import java.nio.MappedByteBuffer;
 import java.nio.channels.AsynchronousChannelGroup;
+import java.nio.channels.AsynchronousFileChannel;
 import java.nio.channels.AsynchronousServerSocketChannel;
 import java.nio.channels.AsynchronousSocketChannel;
+import java.nio.channels.CompletionHandler;
 import java.nio.channels.DatagramChannel;
+import java.nio.channels.FileChannel;
+import java.nio.channels.FileLock;
 import java.nio.channels.Pipe;
+import java.nio.channels.ReadableByteChannel;
 import java.nio.channels.SeekableByteChannel;
+import java.nio.channels.SelectableChannel;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
+import java.nio.channels.WritableByteChannel;
 import java.nio.channels.spi.AbstractSelector;
 import java.nio.channels.spi.AsynchronousChannelProvider;
 import java.nio.channels.spi.SelectorProvider;
@@ -67,6 +80,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.spi.CalendarDataProvider;
 import java.util.spi.CalendarNameProvider;
@@ -676,4 +690,162 @@ class DummyImplementations {
 
         }
     }
+
+    static class DummyFileChannel extends FileChannel {
+        @Override
+        protected void implCloseChannel() throws IOException {
+
+        }
+
+        @Override
+        public int read(ByteBuffer dst) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public long read(ByteBuffer[] dsts, int offset, int length) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public int write(ByteBuffer src) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public long write(ByteBuffer[] srcs, int offset, int length) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public long position() throws IOException {
+            return 0;
+        }
+
+        @Override
+        public FileChannel position(long newPosition) throws IOException {
+            return null;
+        }
+
+        @Override
+        public long size() throws IOException {
+            return 0;
+        }
+
+        @Override
+        public FileChannel truncate(long size) throws IOException {
+            return null;
+        }
+
+        @Override
+        public void force(boolean metaData) throws IOException {
+
+        }
+
+        @Override
+        public long transferTo(long position, long count, WritableByteChannel target) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public long transferFrom(ReadableByteChannel src, long position, long count) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public int read(ByteBuffer dst, long position) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public int write(ByteBuffer src, long position) throws IOException {
+            return 0;
+        }
+
+        @Override
+        public MappedByteBuffer map(MapMode mode, long position, long size) throws IOException {
+            return null;
+        }
+
+        @Override
+        public FileLock lock(long position, long size, boolean shared) throws IOException {
+            return null;
+        }
+
+        @Override
+        public FileLock tryLock(long position, long size, boolean shared) throws IOException {
+            return null;
+        }
+    }
+
+    static class DummyAsynchronousFileChannel extends AsynchronousFileChannel {
+        @Override
+        public boolean isOpen() {
+            return false;
+        }
+
+        @Override
+        public void close() throws IOException {
+
+        }
+
+        @Override
+        public long size() throws IOException {
+            return 0;
+        }
+
+        @Override
+        public AsynchronousFileChannel truncate(long size) throws IOException {
+            return null;
+        }
+
+        @Override
+        public void force(boolean metaData) throws IOException {
+
+        }
+
+        @Override
+        public <A> void lock(long position, long size, boolean shared, A attachment, CompletionHandler<FileLock, ? super A> handler) {
+
+        }
+
+        @Override
+        public Future<FileLock> lock(long position, long size, boolean shared) {
+            return null;
+        }
+
+        @Override
+        public FileLock tryLock(long position, long size, boolean shared) throws IOException {
+            return null;
+        }
+
+        @Override
+        public <A> void read(ByteBuffer dst, long position, A attachment, CompletionHandler<Integer, ? super A> handler) {
+
+        }
+
+        @Override
+        public Future<Integer> read(ByteBuffer dst, long position) {
+            return null;
+        }
+
+        @Override
+        public <A> void write(ByteBuffer src, long position, A attachment, CompletionHandler<Integer, ? super A> handler) {
+
+        }
+
+        @Override
+        public Future<Integer> write(ByteBuffer src, long position) {
+            return null;
+        }
+    }
+
+    @SuppressForbidden(reason = "specifically testing readWriteSelectableChannel")
+    static class DummySelectableChannelCloser implements Channels.SelectableChannelCloser {
+        @Override
+        public void implCloseChannel(SelectableChannel sc) throws IOException {}
+
+        @Override
+        public void implReleaseChannel(SelectableChannel sc) throws IOException {}
+    }
 }
@@ -9,6 +9,7 @@
 
 package org.elasticsearch.entitlement.qa.test;
 
+import org.elasticsearch.core.CheckedRunnable;
 import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.entitlement.qa.entitled.EntitledActions;
 
@@ -22,16 +23,24 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.nio.file.attribute.UserPrincipal;
+import java.security.GeneralSecurityException;
+import java.security.KeyStore;
 import java.util.Scanner;
+import java.util.jar.JarFile;
+import java.util.zip.ZipException;
+import java.util.zip.ZipFile;
 
+import static java.nio.charset.Charset.defaultCharset;
+import static java.util.zip.ZipFile.OPEN_DELETE;
+import static java.util.zip.ZipFile.OPEN_READ;
+import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.createTempFileForWrite;
 import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
 import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;
 
 @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden")
+@SuppressWarnings("unused") // Called via reflection
 class FileCheckActions {
 
     static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir"));
@@ -207,21 +216,6 @@ class FileCheckActions {
         readWriteFile().toFile().setWritable(true, false);
     }
 
-    @EntitlementTest(expectedAccess = PLUGINS)
-    static void createScannerFile() throws FileNotFoundException {
-        new Scanner(readFile().toFile());
-    }
-
-    @EntitlementTest(expectedAccess = PLUGINS)
-    static void createScannerFileWithCharset() throws IOException {
-        new Scanner(readFile().toFile(), StandardCharsets.UTF_8);
-    }
-
-    @EntitlementTest(expectedAccess = PLUGINS)
-    static void createScannerFileWithCharsetName() throws FileNotFoundException {
-        new Scanner(readFile().toFile(), "UTF-8");
-    }
-
     @EntitlementTest(expectedAccess = PLUGINS)
     static void createFileInputStreamFile() throws IOException {
         new FileInputStream(readFile().toFile()).close();
@@ -348,19 +342,138 @@ class FileCheckActions {
     }
 
     @EntitlementTest(expectedAccess = PLUGINS)
-    static void filesGetOwner() throws IOException {
-        Files.getOwner(readFile());
+    static void keystoreGetInstance_FileCharArray() throws IOException {
+        try {
+            KeyStore.getInstance(readFile().toFile(), new char[0]);
+        } catch (GeneralSecurityException expected) {
+            return;
+        }
+        throw new AssertionError("Expected an exception");
     }
 
     @EntitlementTest(expectedAccess = PLUGINS)
-    static void filesProbeContentType() throws IOException {
-        Files.probeContentType(readFile());
+    static void keystoreGetInstance_FileLoadStoreParameter() throws IOException {
+        try {
+            KeyStore.LoadStoreParameter loadStoreParameter = () -> null;
+            KeyStore.getInstance(readFile().toFile(), loadStoreParameter);
+        } catch (GeneralSecurityException expected) {
+            return;
+        }
+        throw new AssertionError("Expected an exception");
     }
 
     @EntitlementTest(expectedAccess = PLUGINS)
-    static void filesSetOwner() throws IOException {
-        UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile());
-        Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method
+    static void keystoreBuilderNewInstance() {
+        try {
+            KeyStore.Builder.newInstance("", null, readFile().toFile(), null);
+        } catch (NullPointerException expected) {
+            return;
+        }
+        throw new AssertionError("Expected an exception");
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_String() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toString()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_StringCharset() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toString(), defaultCharset()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_File() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toFile()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_FileCharset() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toFile(), defaultCharset()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_FileReadOnly() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toFile(), OPEN_READ).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_FileReadAndDelete() throws IOException {
+        expectZipException(() -> new ZipFile(createTempFileForWrite().toFile(), OPEN_READ | OPEN_DELETE).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_ReadOnlyCharset() throws IOException {
+        expectZipException(() -> new ZipFile(readFile().toFile(), OPEN_READ, defaultCharset()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void zipFile_ReadAndDeleteCharset() throws IOException {
+        expectZipException(() -> new ZipFile(createTempFileForWrite().toFile(), OPEN_READ | OPEN_DELETE, defaultCharset()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_String() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toString()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_StringBoolean() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toString(), false).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_FileReadOnly() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toFile(), false, OPEN_READ).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_FileReadAndDelete() throws IOException {
+        expectZipException(() -> new JarFile(createTempFileForWrite().toFile(), false, OPEN_READ | OPEN_DELETE).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_FileBooleanReadOnlyVersion() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toFile(), false, OPEN_READ, Runtime.version()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_FileBooleanReadAndDeleteOnlyVersion() throws IOException {
+        expectZipException(() -> new JarFile(createTempFileForWrite().toFile(), false, OPEN_READ | OPEN_DELETE, Runtime.version()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFile_File() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toFile()).close());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void jarFileFileBoolean() throws IOException {
+        expectZipException(() -> new JarFile(readFile().toFile(), false).close());
+    }
+
+    private static void expectZipException(CheckedRunnable<IOException> action) throws IOException {
+        try {
+            action.run();
+        } catch (ZipException expected) {
+            return;
+        }
+        throw new AssertionError("Expected an exception");
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void createScannerFile() throws FileNotFoundException {
+        new Scanner(readFile().toFile());
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void createScannerFileWithCharset() throws IOException {
+        new Scanner(readFile().toFile(), StandardCharsets.UTF_8);
+    }
+
+    @EntitlementTest(expectedAccess = PLUGINS)
+    static void createScannerFileWithCharsetName() throws FileNotFoundException {
+        new Scanner(readFile().toFile(), "UTF-8");
     }
 
     private FileCheckActions() {}
@@ -0,0 +1,87 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.entitlement.qa.test;

import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.qa.entitled.EntitledActions;

import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
import java.util.Set;

import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;

class NioChannelsActions {

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void createFileChannel() throws IOException {
        new DummyImplementations.DummyFileChannel().close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForWrite() throws IOException {
        FileChannel.open(FileCheckActions.readWriteFile(), StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForRead() throws IOException {
        FileChannel.open(FileCheckActions.readFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForWriteWithOptions() throws IOException {
        FileChannel.open(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE)).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void fileChannelOpenForReadWithOptions() throws IOException {
        FileChannel.open(FileCheckActions.readFile(), Set.of(StandardOpenOption.READ)).close();
    }

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void createAsynchronousFileChannel() throws IOException {
        new DummyImplementations.DummyAsynchronousFileChannel().close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForWrite() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        AsynchronousFileChannel.open(file, StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForRead() throws IOException {
        var file = EntitledActions.createTempFileForRead();
        AsynchronousFileChannel.open(file).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForWriteWithOptions() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        AsynchronousFileChannel.open(file, Set.of(StandardOpenOption.WRITE), EsExecutors.DIRECT_EXECUTOR_SERVICE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void asynchronousFileChannelOpenForReadWithOptions() throws IOException {
        var file = EntitledActions.createTempFileForRead();
        AsynchronousFileChannel.open(file, Set.of(StandardOpenOption.READ), EsExecutors.DIRECT_EXECUTOR_SERVICE).close();
    }

    @SuppressForbidden(reason = "specifically testing jdk.nio.Channels")
    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void channelsReadWriteSelectableChannel() throws IOException {
        jdk.nio.Channels.readWriteSelectableChannel(new FileDescriptor(), new DummyImplementations.DummySelectableChannelCloser()).close();
    }
}
@@ -0,0 +1,475 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.entitlement.qa.test;

import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.qa.entitled.EntitledActions;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystemException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileOwnerAttributeView;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.UserPrincipal;
import java.time.Instant;
import java.util.List;
import java.util.Set;

import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;
import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readDir;
import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readFile;
import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readWriteDir;
import static org.elasticsearch.entitlement.qa.test.FileCheckActions.readWriteFile;

class NioFilesActions {

    @EntitlementTest(expectedAccess = PLUGINS)
    static void filesGetOwner() throws IOException {
        Files.getOwner(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void filesProbeContentType() throws IOException {
        Files.probeContentType(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void filesSetOwner() throws IOException {
        UserPrincipal owner = EntitledActions.getFileOwner(readWriteFile());
        Files.setOwner(readWriteFile(), owner); // set to existing owner, just trying to execute the method
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewInputStream() throws IOException {
        Files.newInputStream(readFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewOutputStream() throws IOException {
        Files.newOutputStream(readWriteFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewByteChannelRead() throws IOException {
        Files.newByteChannel(readFile(), Set.of(StandardOpenOption.READ)).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewByteChannelWrite() throws IOException {
        Files.newByteChannel(readWriteFile(), Set.of(StandardOpenOption.WRITE)).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewByteChannelReadVarargs() throws IOException {
        Files.newByteChannel(readFile(), StandardOpenOption.READ).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewByteChannelWriteVarargs() throws IOException {
        Files.newByteChannel(readWriteFile(), StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewDirectoryStream() throws IOException {
        Files.newDirectoryStream(FileCheckActions.readDir()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewDirectoryStreamGlob() throws IOException {
        Files.newDirectoryStream(FileCheckActions.readDir(), "*").close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewDirectoryStreamFilter() throws IOException {
        Files.newDirectoryStream(FileCheckActions.readDir(), entry -> false).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateFile() throws IOException {
        Files.createFile(readWriteDir().resolve("file.txt"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateDirectory() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.createDirectory(directory.resolve("subdir"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateDirectories() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.createDirectories(directory.resolve("subdir").resolve("subsubdir"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateTempFileInDir() throws IOException {
        Files.createTempFile(readWriteDir(), "prefix", "suffix");
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateTempDirectoryInDir() throws IOException {
        Files.createTempDirectory(readWriteDir(), "prefix");
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateSymbolicLink() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        try {
            Files.createSymbolicLink(directory.resolve("link"), readFile());
        } catch (UnsupportedOperationException | FileSystemException e) {
            // OK not to implement symbolic link in the filesystem
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCreateLink() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        try {
            Files.createLink(directory.resolve("link"), readFile());
        } catch (UnsupportedOperationException | FileSystemException e) {
            // OK not to implement symbolic link in the filesystem
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesDelete() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        Files.delete(file);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesDeleteIfExists() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        Files.deleteIfExists(file);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadSymbolicLink() throws IOException {
        var link = EntitledActions.createTempSymbolicLink();
        Files.readSymbolicLink(link);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCopy() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.copy(readFile(), directory.resolve("copied"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesMove() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        var file = EntitledActions.createTempFileForWrite();
        Files.move(file, directory.resolve("moved"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsSameFile() throws IOException {
        Files.isSameFile(readWriteFile(), readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesMismatch() throws IOException {
        Files.mismatch(readWriteFile(), readFile());
    }

    @SuppressForbidden(reason = "testing entitlements on this API specifically")
    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsHidden() throws IOException {
        Files.isHidden(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesGetFileStore() throws IOException {
        var file = EntitledActions.createTempFileForRead();
        Files.getFileStore(file);
    }

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void checkFilesGetFileAttributeView() {
        Files.getFileAttributeView(readFile(), FileOwnerAttributeView.class);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadAttributesWithClass() throws IOException {
        Files.readAttributes(readFile(), BasicFileAttributes.class);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadAttributesWithString() throws IOException {
        Files.readAttributes(readFile(), "*");
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesGetAttribute() throws IOException {
        try {
            Files.getAttribute(readFile(), "dos:hidden");
        } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) {
            // OK if the file does not have/does not support the attribute
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesSetAttribute() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        try {
            Files.setAttribute(file, "dos:hidden", true);
        } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) {
            // OK if the file does not have/does not support the attribute
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesGetPosixFilePermissions() throws IOException {
        try {
            Files.getPosixFilePermissions(readFile());
        } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) {
            // OK if the file does not have/does not support the attribute
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesSetPosixFilePermissions() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        try {
            Files.setPosixFilePermissions(file, Set.of());
        } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) {
            // OK if the file does not have/does not support the attribute
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsSymbolicLink() {
        Files.isSymbolicLink(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsDirectory() {
        Files.isDirectory(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsRegularFile() {
        Files.isRegularFile(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesGetLastModifiedTime() throws IOException {
        Files.getLastModifiedTime(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesSetLastModifiedTime() throws IOException {
        var file = EntitledActions.createTempFileForWrite();
        Files.setLastModifiedTime(file, FileTime.from(Instant.now()));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesSize() throws IOException {
        Files.size(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesExists() {
        Files.exists(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNotExists() {
        Files.notExists(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsReadable() {
        Files.isReadable(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsWriteable() {
        Files.isWritable(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesIsExecutable() {
        Files.isExecutable(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWalkFileTree() throws IOException {
        Files.walkFileTree(readDir(), new FileVisitor<>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }
        });
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWalkFileTreeWithOptions() throws IOException {
        Files.walkFileTree(readDir(), Set.of(FileVisitOption.FOLLOW_LINKS), 2, new FileVisitor<>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.SKIP_SUBTREE;
            }
        });
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewBufferedReader() throws IOException {
        Files.newBufferedReader(readFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewBufferedReaderWithCharset() throws IOException {
        Files.newBufferedReader(readFile(), Charset.defaultCharset()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewBufferedWriter() throws IOException {
        Files.newBufferedWriter(readWriteFile(), StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesNewBufferedWriterWithCharset() throws IOException {
        Files.newBufferedWriter(readWriteFile(), Charset.defaultCharset(), StandardOpenOption.WRITE).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCopyInputStream() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.copy(new ByteArrayInputStream("foo".getBytes(StandardCharsets.UTF_8)), directory.resolve("copied"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesCopyOutputStream() throws IOException {
        Files.copy(readFile(), new ByteArrayOutputStream());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadAllBytes() throws IOException {
        Files.readAllBytes(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadString() throws IOException {
        Files.readString(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadStringWithCharset() throws IOException {
        Files.readString(readFile(), Charset.defaultCharset());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadAllLines() throws IOException {
        Files.readAllLines(readFile());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesReadAllLinesWithCharset() throws IOException {
        Files.readAllLines(readFile(), Charset.defaultCharset());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWrite() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.write(directory.resolve("file"), "foo".getBytes(StandardCharsets.UTF_8));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWriteLines() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.write(directory.resolve("file"), List.of("foo"));
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWriteString() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.writeString(directory.resolve("file"), "foo");
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWriteStringWithCharset() throws IOException {
        var directory = EntitledActions.createTempDirectoryForWrite();
        Files.writeString(directory.resolve("file"), "foo", Charset.defaultCharset());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesList() throws IOException {
        Files.list(readDir()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWalk() throws IOException {
        Files.walk(readDir()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesWalkWithDepth() throws IOException {
        Files.walk(readDir(), 2).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesFind() throws IOException {
        Files.find(readDir(), 2, (path, basicFileAttributes) -> false).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesLines() throws IOException {
        Files.lines(readFile()).close();
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void checkFilesLinesWithCharset() throws IOException {
        Files.lines(readFile(), Charset.defaultCharset()).close();
    }

    private NioFilesActions() {}
}
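Each action above deliberately performs a single java.nio.file.Files call against one of the pre-provisioned test paths (readFile(), readWriteFile(), readDir(), readWriteDir()), so every instrumented JDK entry point maps one-to-one to an expected entitlement outcome. The snippet below is a much-simplified, hypothetical stand-in for that flow (it is not the real PolicyManager API): a guard decides read access per path and throws when the caller is not entitled.

import java.nio.file.Path;

// Hypothetical, self-contained illustration of the check each action exercises:
// an instrumented Files.* entry point consults a policy before touching the path.
class FileReadGuardSketch {
    private static final Path ENTITLED_READ_PATH = Path.of("/tmp/entitlements/read_file"); // assumed test path

    static void checkFileRead(Path path) {
        // stand-in for policyManager.checkFileRead(callerClass, path)
        if (path.startsWith(ENTITLED_READ_PATH) == false) {
            throw new SecurityException("not entitled to read " + path);
        }
    }

    public static void main(String[] args) {
        checkFileRead(ENTITLED_READ_PATH); // allowed, like the PLUGINS cases above
        try {
            checkFileRead(Path.of("/etc/passwd")); // denied
        } catch (SecurityException e) {
            System.out.println(e.getMessage());
        }
    }
}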
@@ -189,6 +189,8 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
             getTestEntries(FileStoreActions.class),
             getTestEntries(ManageThreadsActions.class),
             getTestEntries(NativeActions.class),
+            getTestEntries(NioChannelsActions.class),
+            getTestEntries(NioFilesActions.class),
             getTestEntries(NioFileSystemActions.class),
             getTestEntries(PathActions.class),
             getTestEntries(SpiActions.class),
@@ -38,7 +38,7 @@ class EntitlementsTestRule implements TestRule {
         Map.of(
             "files",
             List.of(
-                Map.of("path", tempDir.resolve("read_dir"), "mode", "read"),
+                Map.of("path", tempDir.resolve("read_dir"), "mode", "read_write"),
                 Map.of("path", tempDir.resolve("read_write_dir"), "mode", "read_write"),
                 Map.of("path", tempDir.resolve("read_file"), "mode", "read"),
                 Map.of("path", tempDir.resolve("read_write_file"), "mode", "read_write")
@@ -14,6 +14,7 @@ module org.elasticsearch.entitlement {
     requires org.elasticsearch.base;
     requires jdk.attach;
     requires java.net.http;
+    requires jdk.net;

     requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base

@@ -39,6 +39,7 @@ public class EntitlementBootstrap {
         Function<Class<?>, String> pluginResolver,
         Function<String, String> settingResolver,
         Function<String, Stream<String>> settingGlobResolver,
+        Function<String, Path> repoDirResolver,
         Path[] dataDirs,
         Path configDir,
         Path logsDir,
@@ -49,6 +50,7 @@ public class EntitlementBootstrap {
         requireNonNull(pluginResolver);
         requireNonNull(settingResolver);
         requireNonNull(settingGlobResolver);
+        requireNonNull(repoDirResolver);
         requireNonNull(dataDirs);
         if (dataDirs.length == 0) {
             throw new IllegalArgumentException("must provide at least one data directory");
@@ -71,6 +73,9 @@ public class EntitlementBootstrap {
      *
      * @param pluginPolicies a map holding policies for plugins (and modules), by plugin (or module) name.
      * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name).
+     * @param settingResolver a functor to resolve the value of an Elasticsearch setting.
+     * @param settingGlobResolver a functor to resolve a glob expression for one or more Elasticsearch settings.
+     * @param repoDirResolver a functor to map a repository location to its Elasticsearch path.
      * @param dataDirs data directories for Elasticsearch
      * @param configDir the config directory for Elasticsearch
      * @param tempDir the temp directory for Elasticsearch
@@ -81,6 +86,7 @@ public class EntitlementBootstrap {
         Function<Class<?>, String> pluginResolver,
         Function<String, String> settingResolver,
         Function<String, Stream<String>> settingGlobResolver,
+        Function<String, Path> repoDirResolver,
         Path[] dataDirs,
         Path configDir,
         Path logsDir,
@@ -95,6 +101,7 @@ public class EntitlementBootstrap {
             pluginResolver,
             settingResolver,
             settingGlobResolver,
+            repoDirResolver,
             dataDirs,
             configDir,
             logsDir,
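The new repoDirResolver parameter is plain java.util.function.Function<String, Path> plumbing: it turns a repository location string into the path the entitlement policy should cover. A hypothetical wiring sketch follows (the resolver body and paths are illustrative, not the actual Elasticsearch call site):

import java.nio.file.Path;
import java.util.function.Function;

class RepoDirResolverSketch {
    // Illustrative only: map a repository location to a directory under an assumed backup root.
    static final Function<String, Path> REPO_DIR_RESOLVER = location -> Path.of("/mnt/backups").resolve(location);

    public static void main(String[] args) {
        System.out.println(REPO_DIR_RESOLVER.apply("snapshots")); // /mnt/backups/snapshots
    }
}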
@@ -52,7 +52,6 @@ import java.nio.file.WatchService;
 import java.nio.file.attribute.FileAttribute;
 import java.nio.file.spi.FileSystemProvider;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -147,7 +146,18 @@ public class EntitlementInitialization {
         List<Scope> serverScopes = new ArrayList<>();
         Collections.addAll(
             serverScopes,
-            new Scope("org.elasticsearch.base", List.of(new CreateClassLoaderEntitlement())),
+            new Scope(
+                "org.elasticsearch.base",
+                List.of(
+                    new CreateClassLoaderEntitlement(),
+                    new FilesEntitlement(
+                        List.of(
+                            FileData.ofPath(bootstrapArgs.repoDirResolver().apply(""), READ_WRITE),
+                            FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE)
+                        )
+                    )
+                )
+            ),
             new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())),
             new Scope(
                 "org.elasticsearch.server",
@@ -160,11 +170,14 @@ public class EntitlementInitialization {
                     new LoadNativeLibrariesEntitlement(),
                     new ManageThreadsEntitlement(),
                     new FilesEntitlement(
-                        Stream.concat(
-                            Stream.of(
+                        List.of(
+                            // Base ES directories
                             FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE),
                             FileData.ofPath(bootstrapArgs.configDir(), READ),
                             FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE),
+                            FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE),
+                            FileData.ofPath(bootstrapArgs.repoDirResolver().apply(""), READ_WRITE),
+
                             // OS release on Linux
                             FileData.ofPath(Path.of("/etc/os-release"), READ),
                             FileData.ofPath(Path.of("/etc/system-release"), READ),
@@ -182,9 +195,7 @@ public class EntitlementInitialization {
                             // // io stats on Linux
                             FileData.ofPath(Path.of("/proc/self/mountinfo"), READ),
                             FileData.ofPath(Path.of("/proc/diskstats"), READ)
-                            ),
-                            Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ))
-                        ).toList()
+                        )
                     )
                 )
             ),
@@ -196,12 +207,17 @@ public class EntitlementInitialization {
                     new LoadNativeLibrariesEntitlement(),
                     new ManageThreadsEntitlement(),
                     new FilesEntitlement(
-                        Stream.concat(
-                            Stream.of(FileData.ofPath(bootstrapArgs.configDir(), READ)),
-                            Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ_WRITE))
-                        ).toList()
+                        List.of(
+                            FileData.ofPath(bootstrapArgs.configDir(), READ),
+                            FileData.ofPath(bootstrapArgs.tempDir(), READ),
+                            FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE)
+                        )
                     )
                 )
+            ),
+            new Scope(
+                "org.apache.lucene.misc",
+                List.of(new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE))))
             ),
             new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())),
             new Scope(
@@ -215,8 +231,10 @@ public class EntitlementInitialization {

         Path trustStorePath = trustStorePath();
         if (trustStorePath != null) {
-            serverScopes.add(
-                new Scope("org.bouncycastle.fips.tls", List.of(new FilesEntitlement(List.of(FileData.ofPath(trustStorePath, READ)))))
+            Collections.addAll(
+                serverScopes,
+                new Scope("org.bouncycastle.fips.tls", List.of(new FilesEntitlement(List.of(FileData.ofPath(trustStorePath, READ))))),
+                new Scope("org.bouncycastle.fips.core", List.of(new ManageThreadsEntitlement()))
             );
         }

@@ -224,7 +242,16 @@ public class EntitlementInitialization {
         var serverPolicy = new Policy("server", serverScopes);
         // agents run without a module, so this is a special hack for the apm agent
         // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed
-        List<Entitlement> agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new ManageThreadsEntitlement());
+        List<Entitlement> agentEntitlements = List.of(
+            new CreateClassLoaderEntitlement(),
+            new ManageThreadsEntitlement(),
+            new FilesEntitlement(
+                List.of(
+                    FileData.ofPath(Path.of("/co/elastic/apm/agent/"), READ),
+                    FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ)
+                )
+            )
+        );
         var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver();
         return new PolicyManager(
             serverPolicy,
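The pattern repeated throughout this hunk is one Scope per module, holding a list of entitlements, with file access granted through a FilesEntitlement built from FileData entries. A minimal fragment in the same style (module name and path are invented for illustration; it compiles only against the entitlement runtime classes used above):

// Hypothetical scope: grants a fictional "org.example.plugin" module read access to /etc/hosts
// and read/write access to its data directory, mirroring the FileData factories used above.
Scope exampleScope = new Scope(
    "org.example.plugin",
    List.of(
        new FilesEntitlement(
            List.of(
                FileData.ofPath(Path.of("/etc/hosts"), READ),
                FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE)
            )
        )
    )
);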
@ -9,6 +9,8 @@
|
||||||
|
|
||||||
package org.elasticsearch.entitlement.runtime.api;
|
package org.elasticsearch.entitlement.runtime.api;
|
||||||
|
|
||||||
|
import jdk.nio.Channels;
|
||||||
|
|
||||||
import org.elasticsearch.core.SuppressForbidden;
|
import org.elasticsearch.core.SuppressForbidden;
|
||||||
import org.elasticsearch.entitlement.bridge.EntitlementChecker;
|
import org.elasticsearch.entitlement.bridge.EntitlementChecker;
|
||||||
import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
|
import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
|
||||||
|
@ -19,6 +21,7 @@ import java.io.FileFilter;
|
||||||
import java.io.FilenameFilter;
|
import java.io.FilenameFilter;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStream;
|
import java.io.InputStream;
|
||||||
|
import java.io.OutputStream;
|
||||||
import java.io.PrintStream;
|
import java.io.PrintStream;
|
||||||
import java.io.PrintWriter;
|
import java.io.PrintWriter;
|
||||||
import java.lang.foreign.AddressLayout;
|
import java.lang.foreign.AddressLayout;
|
||||||
|
@ -64,6 +67,8 @@ import java.nio.file.AccessMode;
|
||||||
import java.nio.file.CopyOption;
|
import java.nio.file.CopyOption;
|
||||||
import java.nio.file.DirectoryStream;
|
import java.nio.file.DirectoryStream;
|
||||||
import java.nio.file.FileStore;
|
import java.nio.file.FileStore;
|
||||||
|
import java.nio.file.FileVisitOption;
|
||||||
|
import java.nio.file.FileVisitor;
|
||||||
import java.nio.file.Files;
|
import java.nio.file.Files;
|
||||||
import java.nio.file.LinkOption;
|
import java.nio.file.LinkOption;
|
||||||
import java.nio.file.OpenOption;
|
import java.nio.file.OpenOption;
|
||||||
|
@ -71,10 +76,17 @@ import java.nio.file.Path;
|
||||||
import java.nio.file.StandardOpenOption;
|
import java.nio.file.StandardOpenOption;
|
||||||
import java.nio.file.WatchEvent;
|
import java.nio.file.WatchEvent;
|
||||||
import java.nio.file.WatchService;
|
import java.nio.file.WatchService;
|
||||||
|
import java.nio.file.attribute.BasicFileAttributes;
|
||||||
import java.nio.file.attribute.FileAttribute;
|
import java.nio.file.attribute.FileAttribute;
|
||||||
|
import java.nio.file.attribute.FileAttributeView;
|
||||||
|
import java.nio.file.attribute.FileTime;
|
||||||
|
import java.nio.file.attribute.PosixFilePermission;
|
||||||
import java.nio.file.attribute.UserPrincipal;
|
import java.nio.file.attribute.UserPrincipal;
|
||||||
import java.nio.file.spi.FileSystemProvider;
|
import java.nio.file.spi.FileSystemProvider;
|
||||||
|
import java.security.KeyStore;
|
||||||
|
import java.security.Provider;
|
||||||
import java.security.cert.CertStoreParameters;
|
import java.security.cert.CertStoreParameters;
|
||||||
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
@ -83,6 +95,7 @@ import java.util.Set;
|
||||||
import java.util.TimeZone;
|
import java.util.TimeZone;
|
||||||
import java.util.concurrent.ExecutorService;
|
import java.util.concurrent.ExecutorService;
|
||||||
import java.util.concurrent.ForkJoinPool;
|
import java.util.concurrent.ForkJoinPool;
|
||||||
|
import java.util.function.BiPredicate;
|
||||||
import java.util.function.Consumer;
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
import javax.net.ssl.HostnameVerifier;
|
import javax.net.ssl.HostnameVerifier;
|
||||||
|
@ -1232,6 +1245,36 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_security_KeyStore$$getInstance(Class<?> callerClass, File file, char[] password) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_security_KeyStore$$getInstance(Class<?> callerClass, File file, KeyStore.LoadStoreParameter param) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_security_KeyStore$Builder$$newInstance(
|
||||||
|
Class<?> callerClass,
|
||||||
|
File file,
|
||||||
|
KeyStore.ProtectionParameter protection
|
||||||
|
) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_security_KeyStore$Builder$$newInstance(
|
||||||
|
Class<?> callerClass,
|
||||||
|
String type,
|
||||||
|
Provider provider,
|
||||||
|
File file,
|
||||||
|
KeyStore.ProtectionParameter protection
|
||||||
|
) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void check$java_util_Scanner$(Class<?> callerClass, File source) {
|
public void check$java_util_Scanner$(Class<?> callerClass, File source) {
|
||||||
policyManager.checkFileRead(callerClass, source);
|
policyManager.checkFileRead(callerClass, source);
|
||||||
|
@ -1247,8 +1290,134 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
|
||||||
policyManager.checkFileRead(callerClass, source);
|
policyManager.checkFileRead(callerClass, source);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, String name) {
|
||||||
|
policyManager.checkFileRead(callerClass, new File(name));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, String name, boolean verify) {
|
||||||
|
policyManager.checkFileRead(callerClass, new File(name));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, File file) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify, int mode) {
|
||||||
|
policyManager.checkFileWithZipMode(callerClass, file, mode);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_jar_JarFile$(Class<?> callerClass, File file, boolean verify, int mode, Runtime.Version version) {
|
||||||
|
policyManager.checkFileWithZipMode(callerClass, file, mode);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, String name) {
|
||||||
|
policyManager.checkFileRead(callerClass, new File(name));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, String name, Charset charset) {
|
||||||
|
policyManager.checkFileRead(callerClass, new File(name));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, File file) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, int mode) {
|
||||||
|
policyManager.checkFileWithZipMode(callerClass, file, mode);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, Charset charset) {
|
||||||
|
policyManager.checkFileRead(callerClass, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_util_zip_ZipFile$(Class<?> callerClass, File file, int mode, Charset charset) {
|
||||||
|
policyManager.checkFileWithZipMode(callerClass, file, mode);
|
||||||
|
}
|
||||||
|
|
||||||
// nio
|
// nio
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_FileChannel$(Class<?> callerClass) {
|
||||||
|
policyManager.checkChangeFilesHandling(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_FileChannel$$open(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
Set<? extends OpenOption> options,
|
||||||
|
FileAttribute<?>... attrs
|
||||||
|
) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_FileChannel$$open(Class<?> callerClass, Path path, OpenOption... options) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_AsynchronousFileChannel$(Class<?> callerClass) {
|
||||||
|
policyManager.checkChangeFilesHandling(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_AsynchronousFileChannel$$open(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
Set<? extends OpenOption> options,
|
||||||
|
ExecutorService executor,
|
||||||
|
FileAttribute<?>... attrs
|
||||||
|
) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_channels_AsynchronousFileChannel$$open(Class<?> callerClass, Path path, OpenOption... options) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$jdk_nio_Channels$$readWriteSelectableChannel(
|
||||||
|
Class<?> callerClass,
|
||||||
|
FileDescriptor fd,
|
||||||
|
Channels.SelectableChannelCloser closer
|
||||||
|
) {
|
||||||
|
policyManager.checkFileDescriptorWrite(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public void check$java_nio_file_Files$$getOwner(Class<?> callerClass, Path path, LinkOption... options) {
|
public void check$java_nio_file_Files$$getOwner(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
policyManager.checkFileRead(callerClass, path);
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
@ -1264,6 +1433,411 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
|
||||||
policyManager.checkFileWrite(callerClass, path);
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newInputStream(Class<?> callerClass, Path path, OpenOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newOutputStream(Class<?> callerClass, Path path, OpenOption... options) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newByteChannel(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
Set<? extends OpenOption> options,
|
||||||
|
FileAttribute<?>... attrs
|
||||||
|
) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newByteChannel(Class<?> callerClass, Path path, OpenOption... options) {
|
||||||
|
if (isOpenForWrite(options)) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
} else {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir) {
|
||||||
|
policyManager.checkFileRead(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir, String glob) {
|
||||||
|
policyManager.checkFileRead(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$newDirectoryStream(Class<?> callerClass, Path dir, DirectoryStream.Filter<? super Path> filter) {
|
||||||
|
policyManager.checkFileRead(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createFile(Class<?> callerClass, Path path, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createDirectory(Class<?> callerClass, Path dir, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkFileWrite(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createDirectories(Class<?> callerClass, Path dir, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkFileWrite(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createTempFile(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path dir,
|
||||||
|
String prefix,
|
||||||
|
String suffix,
|
||||||
|
FileAttribute<?>... attrs
|
||||||
|
) {
|
||||||
|
policyManager.checkFileWrite(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createTempFile(Class<?> callerClass, String prefix, String suffix, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkCreateTempFile(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createTempDirectory(Class<?> callerClass, Path dir, String prefix, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkFileWrite(callerClass, dir);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createTempDirectory(Class<?> callerClass, String prefix, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkCreateTempFile(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createSymbolicLink(Class<?> callerClass, Path link, Path target, FileAttribute<?>... attrs) {
|
||||||
|
policyManager.checkFileRead(callerClass, target);
|
||||||
|
policyManager.checkFileWrite(callerClass, link);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$createLink(Class<?> callerClass, Path link, Path existing) {
|
||||||
|
policyManager.checkFileRead(callerClass, existing);
|
||||||
|
policyManager.checkFileWrite(callerClass, link);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$delete(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$deleteIfExists(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$copy(Class<?> callerClass, Path source, Path target, CopyOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, source);
|
||||||
|
policyManager.checkFileWrite(callerClass, target);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$move(Class<?> callerClass, Path source, Path target, CopyOption... options) {
|
||||||
|
policyManager.checkFileWrite(callerClass, source);
|
||||||
|
policyManager.checkFileWrite(callerClass, target);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$readSymbolicLink(Class<?> callerClass, Path link) {
|
||||||
|
policyManager.checkFileRead(callerClass, link);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$getFileStore(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isSameFile(Class<?> callerClass, Path path, Path path2) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
policyManager.checkFileRead(callerClass, path2);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$mismatch(Class<?> callerClass, Path path, Path path2) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
policyManager.checkFileRead(callerClass, path2);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isHidden(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$getFileAttributeView(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
Class<? extends FileAttributeView> type,
|
||||||
|
LinkOption... options
|
||||||
|
) {
|
||||||
|
policyManager.checkGetFileAttributeView(callerClass);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$readAttributes(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
Class<? extends BasicFileAttributes> type,
|
||||||
|
LinkOption... options
|
||||||
|
) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$setAttribute(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path path,
|
||||||
|
String attribute,
|
||||||
|
Object value,
|
||||||
|
LinkOption... options
|
||||||
|
) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$getAttribute(Class<?> callerClass, Path path, String attribute, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$readAttributes(Class<?> callerClass, Path path, String attributes, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$getPosixFilePermissions(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$setPosixFilePermissions(Class<?> callerClass, Path path, Set<PosixFilePermission> perms) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isSymbolicLink(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isDirectory(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isRegularFile(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$getLastModifiedTime(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$setLastModifiedTime(Class<?> callerClass, Path path, FileTime time) {
|
||||||
|
policyManager.checkFileWrite(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$size(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$exists(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$notExists(Class<?> callerClass, Path path, LinkOption... options) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isReadable(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isWritable(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$isExecutable(Class<?> callerClass, Path path) {
|
||||||
|
policyManager.checkFileRead(callerClass, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$walkFileTree(
|
||||||
|
Class<?> callerClass,
|
||||||
|
Path start,
|
||||||
|
Set<FileVisitOption> options,
|
||||||
|
int maxDepth,
|
||||||
|
FileVisitor<? super Path> visitor
|
||||||
|
) {
|
||||||
|
policyManager.checkFileRead(callerClass, start);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void check$java_nio_file_Files$$walkFileTree(Class<?> callerClass, Path start, FileVisitor<? super Path> visitor) {
|
||||||
|
+        policyManager.checkFileRead(callerClass, start);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$newBufferedReader(Class<?> callerClass, Path path, Charset cs) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$newBufferedReader(Class<?> callerClass, Path path) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$newBufferedWriter(Class<?> callerClass, Path path, Charset cs, OpenOption... options) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$newBufferedWriter(Class<?> callerClass, Path path, OpenOption... options) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$copy(Class<?> callerClass, InputStream in, Path target, CopyOption... options) {
+        policyManager.checkFileWrite(callerClass, target);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$copy(Class<?> callerClass, Path source, OutputStream out) {
+        policyManager.checkFileRead(callerClass, source);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$readAllBytes(Class<?> callerClass, Path path) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$readString(Class<?> callerClass, Path path) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$readString(Class<?> callerClass, Path path, Charset cs) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$readAllLines(Class<?> callerClass, Path path, Charset cs) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$readAllLines(Class<?> callerClass, Path path) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$write(Class<?> callerClass, Path path, byte[] bytes, OpenOption... options) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$write(
+        Class<?> callerClass,
+        Path path,
+        Iterable<? extends CharSequence> lines,
+        Charset cs,
+        OpenOption... options
+    ) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$write(
+        Class<?> callerClass,
+        Path path,
+        Iterable<? extends CharSequence> lines,
+        OpenOption... options
+    ) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$writeString(Class<?> callerClass, Path path, CharSequence csq, OpenOption... options) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$writeString(
+        Class<?> callerClass,
+        Path path,
+        CharSequence csq,
+        Charset cs,
+        OpenOption... options
+    ) {
+        policyManager.checkFileWrite(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$list(Class<?> callerClass, Path dir) {
+        policyManager.checkFileRead(callerClass, dir);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$walk(Class<?> callerClass, Path start, int maxDepth, FileVisitOption... options) {
+        policyManager.checkFileRead(callerClass, start);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$walk(Class<?> callerClass, Path start, FileVisitOption... options) {
+        policyManager.checkFileRead(callerClass, start);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$find(
+        Class<?> callerClass,
+        Path start,
+        int maxDepth,
+        BiPredicate<Path, BasicFileAttributes> matcher,
+        FileVisitOption... options
+    ) {
+        policyManager.checkFileRead(callerClass, start);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$lines(Class<?> callerClass, Path path, Charset cs) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
+    @Override
+    public void check$java_nio_file_Files$$lines(Class<?> callerClass, Path path) {
+        policyManager.checkFileRead(callerClass, path);
+    }
+
     // file system providers

     @Override
@@ -1299,6 +1873,17 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
             || options.contains(StandardOpenOption.DELETE_ON_CLOSE);
     }

+    private static boolean isOpenForWrite(OpenOption... options) {
+        return Arrays.stream(options)
+            .anyMatch(
+                o -> o.equals(StandardOpenOption.WRITE)
+                    || o.equals(StandardOpenOption.APPEND)
+                    || o.equals(StandardOpenOption.CREATE)
+                    || o.equals(StandardOpenOption.CREATE_NEW)
+                    || o.equals(StandardOpenOption.DELETE_ON_CLOSE)
+            );
+    }
+
     @Override
     public void checkNewFileChannel(
         Class<?> callerClass,
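The pattern above is mechanical: every instrumented java.nio.file.Files entry point forwards to the policy manager with either a read or a write check on the path argument, and isOpenForWrite decides which one applies when open options are involved. A minimal sketch of the same idea, outside the diff, assuming only a hypothetical PolicyChecks interface (not the real PolicyManager API):

    // Illustrative sketch only; PolicyChecks and checkOpen are hypothetical names.
    import java.nio.file.OpenOption;
    import java.nio.file.Path;
    import java.nio.file.StandardOpenOption;
    import java.util.Arrays;

    final class FileCheckSketch {
        interface PolicyChecks {
            void checkFileRead(Class<?> caller, Path path);
            void checkFileWrite(Class<?> caller, Path path);
        }

        private final PolicyChecks policy;

        FileCheckSketch(PolicyChecks policy) {
            this.policy = policy;
        }

        // Mirrors the isOpenForWrite helper: any option implying mutation routes to the write check.
        void checkOpen(Class<?> caller, Path path, OpenOption... options) {
            boolean write = Arrays.stream(options)
                .anyMatch(
                    o -> o == StandardOpenOption.WRITE
                        || o == StandardOpenOption.APPEND
                        || o == StandardOpenOption.CREATE
                        || o == StandardOpenOption.CREATE_NEW
                        || o == StandardOpenOption.DELETE_ON_CLOSE
                );
            if (write) {
                policy.checkFileWrite(caller, path);
            } else {
                policy.checkFileRead(caller, path);
            }
        }
    }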
@@ -11,6 +11,8 @@ package org.elasticsearch.entitlement.runtime.policy;

 import org.elasticsearch.core.Strings;
 import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap;
+import org.elasticsearch.entitlement.bridge.EntitlementChecker;
 import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
 import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
 import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement;
@@ -48,6 +50,8 @@ import static java.util.Objects.requireNonNull;
 import static java.util.function.Predicate.not;
 import static java.util.stream.Collectors.groupingBy;
 import static java.util.stream.Collectors.toUnmodifiableMap;
+import static java.util.zip.ZipFile.OPEN_DELETE;
+import static java.util.zip.ZipFile.OPEN_READ;

 public class PolicyManager {
     private static final Logger logger = LogManager.getLogger(PolicyManager.class);
@@ -215,7 +219,8 @@ public class PolicyManager {
                 requestingClass.getModule().getName(),
                 requestingClass,
                 operationDescription.get()
-            )
+            ),
+            callerClass
         );
     }

@@ -254,6 +259,13 @@ public class PolicyManager {
         checkChangeJVMGlobalState(callerClass);
     }

+    /**
+     * Check for operations that can modify the way file operations are handled
+     */
+    public void checkChangeFilesHandling(Class<?> callerClass) {
+        checkChangeJVMGlobalState(callerClass);
+    }
+
     @SuppressForbidden(reason = "Explicitly checking File apis")
     public void checkFileRead(Class<?> callerClass, File file) {
         checkFileRead(callerClass, file.toPath());
@@ -274,7 +286,8 @@ public class PolicyManager {
                     requestingClass.getModule().getName(),
                     requestingClass,
                     path
-                )
+                ),
+                callerClass
             );
         }
     }
@@ -299,11 +312,28 @@ public class PolicyManager {
                     requestingClass.getModule().getName(),
                     requestingClass,
                     path
-                )
+                ),
+                callerClass
             );
         }
     }

+    public void checkCreateTempFile(Class<?> callerClass) {
+        checkFileWrite(callerClass, pathLookup.tempDir());
+    }
+
+    @SuppressForbidden(reason = "Explicitly checking File apis")
+    public void checkFileWithZipMode(Class<?> callerClass, File file, int zipMode) {
+        assert zipMode == OPEN_READ || zipMode == (OPEN_READ | OPEN_DELETE);
+        if ((zipMode & OPEN_DELETE) == OPEN_DELETE) {
+            // This needs both read and write, but we happen to know that checkFileWrite
+            // actually checks both.
+            checkFileWrite(callerClass, file);
+        } else {
+            checkFileRead(callerClass, file);
+        }
+    }
+
     public void checkFileDescriptorRead(Class<?> callerClass) {
         neverEntitled(callerClass, () -> "read file descriptor");
     }
@@ -348,14 +378,15 @@ public class PolicyManager {
     }

         var classEntitlements = getEntitlements(requestingClass);
-        checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass);
-        checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass);
+        checkFlagEntitlement(classEntitlements, InboundNetworkEntitlement.class, requestingClass, callerClass);
+        checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass, callerClass);
     }

     private static void checkFlagEntitlement(
         ModuleEntitlements classEntitlements,
         Class<? extends Entitlement> entitlementClass,
-        Class<?> requestingClass
+        Class<?> requestingClass,
+        Class<?> callerClass
     ) {
         if (classEntitlements.hasEntitlement(entitlementClass) == false) {
             notEntitled(
@@ -365,7 +396,8 @@ public class PolicyManager {
                     requestingClass.getModule().getName(),
                     requestingClass,
                     PolicyParser.getEntitlementTypeName(entitlementClass)
-                )
+                ),
+                callerClass
             );
         }
         logger.debug(
@@ -405,12 +437,18 @@ public class PolicyManager {
                 requestingClass.getModule().getName(),
                 requestingClass,
                 property
-            )
+            ),
+            callerClass
         );
     }

-    private static void notEntitled(String message) {
-        throw new NotEntitledException(message);
+    private static void notEntitled(String message, Class<?> callerClass) {
+        var exception = new NotEntitledException(message);
+        // don't log self tests in EntitlementBootstrap
+        if (EntitlementBootstrap.class.equals(callerClass) == false) {
+            logger.warn(message, exception);
+        }
+        throw exception;
     }

     public void checkManageThreadsEntitlement(Class<?> callerClass) {
@@ -422,7 +460,7 @@ public class PolicyManager {
         if (isTriviallyAllowed(requestingClass)) {
             return;
         }
-        checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass);
+        checkFlagEntitlement(getEntitlements(requestingClass), entitlementClass, requestingClass, callerClass);
     }

     ModuleEntitlements getEntitlements(Class<?> requestingClass) {
@@ -522,6 +560,10 @@ public class PolicyManager {
             logger.debug("Entitlement trivially allowed from system module [{}]", requestingClass.getModule().getName());
             return true;
         }
+        if (EntitlementChecker.class.isAssignableFrom(requestingClass)) {
+            logger.debug("Entitlement trivially allowed for EntitlementChecker class");
+            return true;
+        }
         logger.trace("Entitlement not trivially allowed");
         return false;
     }
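The notEntitled(String, Class<?>) change above logs the failure before throwing, except when the caller is the bootstrap's own self-test. A small, self-contained sketch of that control flow, with the bootstrap class and logging stubbed out (these stand-in names are not the real classes):

    // Illustrative sketch only; EntitlementBootstrapStub stands in for the real bootstrap class.
    final class NotEntitledSketch {
        static final class NotEntitledException extends RuntimeException {
            NotEntitledException(String message) {
                super(message);
            }
        }

        static final class EntitlementBootstrapStub {}

        private static void notEntitled(String message, Class<?> callerClass) {
            var exception = new NotEntitledException(message);
            // Self-test calls made during bootstrap are expected to fail, so they are not logged.
            if (EntitlementBootstrapStub.class.equals(callerClass) == false) {
                System.err.println("not entitled: " + message);
            }
            throw exception;
        }

        public static void main(String[] args) {
            try {
                notEntitled("read file descriptor", String.class);
            } catch (NotEntitledException e) {
                System.out.println("caught: " + e.getMessage());
            }
        }
    }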
@@ -0,0 +1,5 @@
+org.elasticsearch.analysis.common:
+  - files:
+      - relative_path: analysis
+        relative_to: config
+        mode: read
@@ -1,3 +1,8 @@
+org.elasticsearch.ingest.geoip:
+  - files:
+      - relative_path: "ingest-geoip"
+        relative_to: config
+        mode: read
 com.maxmind.db:
   - files:
       - relative_path: "ingest-geoip/"
@@ -0,0 +1,5 @@
+org.elasticsearch.ingest.useragent:
+  - files:
+      - relative_path: ingest-user-agent
+        relative_to: config
+        mode: read
@@ -1,3 +1,7 @@
 ALL-UNNAMED:
   - manage_threads
   - outbound_network
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
@@ -6,3 +6,10 @@ io.netty.common:
         mode: "read"
       - path: "/usr/lib/os-release"
         mode: "read"
+      - path: "/proc/sys/net/core/somaxconn"
+        mode: read
+com.azure.identity:
+  - files:
+      - relative_path: "storage-azure/" #/config/storage-azure/azure-federated-token
+        relative_to: config
+        mode: read
@@ -11,3 +11,5 @@ io.netty.common:
         mode: "read"
       - path: "/usr/lib/os-release"
         mode: "read"
+      - path: "/proc/sys/net/core/somaxconn"
+        mode: read
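These entitlement policy files all follow the same shape: a module (or ALL-UNNAMED) is granted file access to a path resolved against a base directory (config, data, ...) with a read or read_write mode. A hedged sketch of how such an entry could be resolved to an absolute path; the record and method names here are illustrative, not the actual policy parser:

    // Illustrative sketch only; FileEntitlementEntry and resolve(...) are hypothetical names.
    import java.nio.file.Path;

    record FileEntitlementEntry(String relativePath, String relativeTo, String mode) {
        // Resolves the entry against the directory named by relative_to, as the YAML above implies.
        Path resolve(Path configDir, Path dataDir) {
            Path base = switch (relativeTo) {
                case "config" -> configDir;
                case "data" -> dataDir;
                default -> throw new IllegalArgumentException("unknown base dir: " + relativeTo);
            };
            return base.resolve(relativePath).normalize();
        }
    }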
@@ -252,8 +252,6 @@ tests:
 - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT
   method: test {yaml=analysis-common/40_token_filters/stemmer_override file access}
   issue: https://github.com/elastic/elasticsearch/issues/121625
-- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT
-  issue: https://github.com/elastic/elasticsearch/issues/121967
 - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
   issue: https://github.com/elastic/elasticsearch/issues/121537
 - class: org.elasticsearch.xpack.restart.FullClusterRestartIT
@@ -290,10 +288,6 @@ tests:
 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
   method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_488}
   issue: https://github.com/elastic/elasticsearch/issues/121611
-- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/122377
-- class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/122378
 - class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests
   method: testSnapshotRecovery {p0=false p1=false}
   issue: https://github.com/elastic/elasticsearch/issues/122549
@@ -328,6 +322,12 @@ tests:
   issue: https://github.com/elastic/elasticsearch/issues/122913
 - class: org.elasticsearch.xpack.search.AsyncSearchSecurityIT
   issue: https://github.com/elastic/elasticsearch/issues/122940
+- class: org.elasticsearch.action.admin.indices.create.ShrinkIndexIT
+  method: testShrinkIndexPrimaryTerm
+  issue: https://github.com/elastic/elasticsearch/issues/122974
+- class: org.elasticsearch.test.apmintegration.TracesApmIT
+  method: testApmIntegration
+  issue: https://github.com/elastic/elasticsearch/issues/122129

 # Examples:
 #
@@ -0,0 +1,5 @@
+org.elasticsearch.analysis.icu:
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
@@ -22,20 +22,35 @@ versions << [
   'hadoop': '3.4.1'
 ]

+def patched = Attribute.of('patched', Boolean)
+
 configurations {
   hdfsFixture2
   hdfsFixture3
+  compileClasspath {
+    attributes {
+      attribute(patched, true)
+    }
+  }
+  runtimeClasspath {
+    attributes {
+      attribute(patched, true)
+    }
+  }
+  testCompileClasspath {
+    attributes {
+      attribute(patched, true)
+    }
+  }
+  testRuntimeClasspath {
+    attributes {
+      attribute(patched, true)
+    }
+  }
 }

 dependencies {
-  api project(path: 'hadoop-client-api', configuration: 'default')
-  if (isEclipse) {
-    /*
-     * Eclipse can't pick up the shadow dependency so we point it at *something*
-     * so it can compile things.
-     */
-    api project(path: 'hadoop-client-api')
-  }
+  api("org.apache.hadoop:hadoop-client-api:${versions.hadoop}")
   runtimeOnly "org.apache.hadoop:hadoop-client-runtime:${versions.hadoop}"
   implementation "org.apache.hadoop:hadoop-hdfs:${versions.hadoop}"
   api "com.google.protobuf:protobuf-java:${versions.protobuf}"
@@ -69,6 +84,20 @@ dependencies {

   hdfsFixture2 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadowedHdfs2')
   hdfsFixture3 project(path: ':test:fixtures:hdfs-fixture', configuration: 'shadow')
+
+  attributesSchema {
+    attribute(patched)
+  }
+  artifactTypes.getByName("jar") {
+    attributes.attribute(patched, false)
+  }
+  registerTransform(org.elasticsearch.gradle.internal.dependencies.patches.hdfs.HdfsClassPatcher) {
+    from.attribute(patched, false)
+    to.attribute(patched, true)
+    parameters {
+      matchingArtifacts = ["hadoop-client-api"]
+    }
+  }
 }

 restResources {
@@ -190,6 +219,15 @@ tasks.named("thirdPartyAudit").configure {
     'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor',
     'org.apache.hadoop.thirdparty.protobuf.MessageSchema',
     'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android32MemoryAccessor',
-    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor'
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor',
+    'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor',
+    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm',
+    'org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm$Slot',
+    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer',
+    'org.apache.hadoop.io.FastByteComparisons$LexicographicalComparerHolder$UnsafeComparer$1',
+    'org.apache.hadoop.io.nativeio.NativeIO',
+    'org.apache.hadoop.service.launcher.InterruptEscalator',
+    'org.apache.hadoop.service.launcher.IrqHandler',
+    'org.apache.hadoop.util.SignalLogger$Handler'
   )
 }
@@ -1,54 +0,0 @@
-import org.gradle.api.file.ArchiveOperations
-
-apply plugin: 'elasticsearch.java'
-
-sourceSets {
-  patcher
-}
-
-configurations {
-  thejar {
-    canBeResolved = true
-  }
-}
-
-dependencies {
-  thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") {
-    transitive = false
-  }
-
-  patcherImplementation 'org.ow2.asm:asm:9.7.1'
-  patcherImplementation 'org.ow2.asm:asm-tree:9.7.1'
-}
-
-def outputDir = layout.buildDirectory.dir("patched-classes")
-
-def patchTask = tasks.register("patchClasses", JavaExec) {
-  inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE)
-  inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE)
-  outputs.dir(outputDir)
-  classpath = sourceSets.patcher.runtimeClasspath
-  mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
-  def thejar = configurations.thejar
-  doFirst {
-    args(thejar.singleFile, outputDir.get().asFile)
-  }
-}
-
-interface InjectedArchiveOps {
-  @Inject ArchiveOperations getArchiveOperations()
-}
-
-tasks.named('jar').configure {
-  dependsOn(configurations.thejar)
-  def injected = project.objects.newInstance(InjectedArchiveOps)
-  def thejar = configurations.thejar
-  from(patchTask)
-  from({ injected.getArchiveOperations().zipTree(thejar.singleFile) }) {
-    eachFile {
-      if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) {
-        it.exclude()
-      }
-    }
-  }
-}
@@ -1,202 +0,0 @@
-[deleted: the standard Apache License, Version 2.0 text, 202 lines, http://www.apache.org/licenses/LICENSE-2.0]
@@ -1,2 +0,0 @@
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the "Elastic License
- * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
- * Public License v 1"; you may not use this file except in compliance with, at
- * your election, the "Elastic License 2.0", the "GNU Affero General Public
- * License v3.0 only", or the "Server Side Public License, v 1".
- */
-
-package org.elasticsearch.hdfs.patch;
-
-import org.objectweb.asm.ClassReader;
-import org.objectweb.asm.ClassVisitor;
-import org.objectweb.asm.ClassWriter;
-
-import java.io.File;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Map;
-import java.util.function.Function;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
-
-public class HdfsClassPatcher {
-    static final Map<String, Function<ClassWriter, ClassVisitor>> patchers = Map.of(
-        "org/apache/hadoop/util/ShutdownHookManager.class",
-        ShutdownHookManagerPatcher::new,
-        "org/apache/hadoop/util/Shell.class",
-        ShellPatcher::new,
-        "org/apache/hadoop/security/UserGroupInformation.class",
-        SubjectGetSubjectPatcher::new,
-        "org/apache/hadoop/security/authentication/client/KerberosAuthenticator.class",
-        SubjectGetSubjectPatcher::new
-    );
-
-    public static void main(String[] args) throws Exception {
-        String jarPath = args[0];
-        Path outputDir = Paths.get(args[1]);
-
-        try (JarFile jarFile = new JarFile(new File(jarPath))) {
-            for (var patcher : patchers.entrySet()) {
-                JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey());
-                if (jarEntry == null) {
-                    throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]");
-                }
-                byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
-
-                ClassReader classReader = new ClassReader(classToPatch);
-                ClassWriter classWriter = new ClassWriter(classReader, 0);
-                classReader.accept(patcher.getValue().apply(classWriter), 0);
-
-                Path outputFile = outputDir.resolve(patcher.getKey());
-                Files.createDirectories(outputFile.getParent());
-                Files.write(outputFile, classWriter.toByteArray());
-            }
-        }
-    }
-}
@@ -5,3 +5,7 @@ ALL-UNNAMED:
   - write_system_properties:
       properties:
         - hadoop.home.dir
+  - files:
+      - relative_path: "repository-hdfs/"
+        relative_to: config
+        mode: read_write
@@ -0,0 +1,5 @@
+ALL-UNNAMED:
+  - files:
+      - relative_path: "indices/"
+        relative_to: data
+        mode: read_write
@@ -65,10 +65,6 @@
         ],
         "default":"open",
         "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both."
-      },
-      "local":{
-        "type":"boolean",
-        "description":"Return local information, do not retrieve the state from master node (default: false)"
       }
     }
   }
@@ -50,17 +50,3 @@ setup:

   - match: {test_index.mappings.text.mapping.text.type: text}
   - match: {test_index.mappings.text.mapping.text.analyzer: default}
-
----
-"Get field mapping with local parameter should fail":
-
-  - requires:
-      test_runner_features: ["warnings"]
-      cluster_features: ["gte_v8.0.0"]
-      reason: "local parameter for get field mapping API was allowed before v8"
-
-  - do:
-      catch: bad_request
-      indices.get_field_mapping:
-        fields: text
-        local: true
@@ -247,6 +247,7 @@ class Elasticsearch {
                 pluginsResolver::resolveClassToPluginName,
                 nodeEnv.settings()::get,
                 nodeEnv.settings()::getGlobValues,
+                nodeEnv::resolveRepoDir,
                 nodeEnv.dataDirs(),
                 nodeEnv.configDir(),
                 nodeEnv.logsDir(),
@@ -112,6 +112,9 @@ public class RoutingNode implements Iterable<ShardRouting> {
         return this.nodeId;
     }

+    /**
+     * Number of shards assigned to this node. Includes relocating shards. Use {@link #numberOfOwningShards()} to exclude relocating shards.
+     */
     public int size() {
         return shards.size();
     }
@@ -521,11 +521,6 @@ public class DesiredBalanceReconciler {
         }

         private DesiredBalanceMetrics.AllocationStats balance() {
-            // Check if rebalancing is disabled.
-            if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) {
-                return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS;
-            }
-
             int unassignedShards = routingNodes.unassigned().size() + routingNodes.unassigned().ignored().size();
             int totalAllocations = 0;
             int undesiredAllocationsExcludingShuttingDownNodes = 0;
@@ -555,9 +550,15 @@ public class DesiredBalanceReconciler {
                 }

                 if (allocation.metadata().nodeShutdowns().contains(shardRouting.currentNodeId()) == false) {
+                    // shard is not on a shutting down node, nor is it on a desired node per the previous check.
                     undesiredAllocationsExcludingShuttingDownNodes++;
                 }

+                if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) {
+                    // Rebalancing is disabled, we're just here to collect the AllocationStats to return.
+                    continue;
+                }
+
                 if (allocation.deciders().canRebalance(shardRouting, allocation).type() != Decision.Type.YES) {
                     // rebalancing disabled for this shard
                     continue;
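The reconciler change above moves the cluster-wide canRebalance(allocation) check from the top of balance() into the per-shard loop, so allocation statistics are still gathered when rebalancing is disabled; only the shard moves are skipped. A compressed sketch of that ordering, with simplified stand-in types rather than the real routing classes:

    // Illustrative sketch only; Deciders and Stats are simplified stand-ins for the real types.
    import java.util.List;

    final class BalanceSketch {
        record Stats(int unassigned, int total, int undesired) {}

        interface Deciders {
            boolean clusterCanRebalance();
            boolean shardCanRebalance(String shard);
        }

        Stats balance(List<String> shards, List<String> undesiredShards, Deciders deciders) {
            int total = 0;
            int undesired = 0;
            for (String shard : shards) {
                total++;
                if (undesiredShards.contains(shard)) {
                    undesired++;   // stats are collected unconditionally
                }
                if (deciders.clusterCanRebalance() == false) {
                    continue;      // rebalancing disabled: keep counting, skip moves
                }
                if (deciders.shardCanRebalance(shard) == false) {
                    continue;      // rebalancing disabled for this shard only
                }
                // ... move the shard towards its desired node here ...
            }
            return new Stats(0, total, undesired);
        }
    }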
@@ -75,9 +75,9 @@ public abstract class AllocationDecider {
     }

     /**
-     * Returns a {@link Decision} whether the cluster can execute
-     * re-balanced operations at all.
-     * {@link Decision#ALWAYS}.
+     * Returns a {@link Decision} on whether the cluster is allowed to rebalance shards to improve relative node shard weights and
+     * performance.
+     * @return {@link Decision#ALWAYS} is returned by default if not overridden.
      */
     public Decision canRebalance(RoutingAllocation allocation) {
         return Decision.ALWAYS;
@@ -82,6 +82,10 @@ public class AllocationDeciders {
         );
     }

+    /**
+     * Returns whether rebalancing (move shards to improve relative node weights and performance) is allowed right now.
+     * Rebalancing can be disabled via cluster settings, or throttled by cluster settings (e.g. max concurrent shard moves).
+     */
     public Decision canRebalance(RoutingAllocation allocation) {
         return withDeciders(
             allocation,
@@ -150,6 +150,10 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider {
                 + "]"
         );

+    /**
+     * Rebalancing may be enabled, disabled, or only allowed after all primaries have started, depending on the cluster setting
+     * {@link #CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING}.
+     */
     @SuppressWarnings("fallthrough")
     @Override
     public Decision canRebalance(RoutingAllocation allocation) {
@@ -61,6 +61,11 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider {
         return canRebalance(allocation);
     }

+    /**
+     * We allow a limited number of concurrent shard relocations, per the cluster setting
+     * {@link #CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING}.
+     * Returns a {@link Decision#THROTTLE} decision if the limit is exceeded, otherwise returns {@link Decision#YES}.
+     */
     @Override
     public Decision canRebalance(RoutingAllocation allocation) {
         int relocatingShards = allocation.routingNodes().getRelocatingShardCount();
@@ -147,6 +147,11 @@ public class EnableAllocationDecider extends AllocationDecider {
         };
     }

+    /**
+     * Rebalancing is limited by the {@link Rebalance} value set on the cluster setting {@link #CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING}.
+     * We might allow movement only of primary shards, or replica shards, or none, or all.
+     * This method only concerns itself with whether {@link Rebalance#NONE} is set: rebalancing is allowed for all other setting values.
+     */
     @Override
     public Decision canRebalance(RoutingAllocation allocation) {
         if (allocation.ignoreDisable()) {
@@ -246,7 +251,7 @@ public class EnableAllocationDecider extends AllocationDecider {
     }

     /**
-     * Rebalance values or rather their string representation to be used used with
+     * Rebalance values or rather their string representation to be used with
      * {@link EnableAllocationDecider#CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING} /
      * {@link EnableAllocationDecider#INDEX_ROUTING_REBALANCE_ENABLE_SETTING}
      * via cluster / index settings.
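Each decider above only documents its own canRebalance(RoutingAllocation) contract; the composite AllocationDeciders combines their answers, where a NO from any decider dominates and a THROTTLE is weaker than a NO but stronger than a YES. A rough sketch of that combination rule over a simplified Decision enum; this is an assumption-level illustration, not the real withDeciders implementation:

    // Illustrative sketch only; the real class combines Decision objects, not a bare enum.
    import java.util.List;

    final class CombineDecidersSketch {
        enum Decision { YES, THROTTLE, NO }

        static Decision canRebalance(List<Decision> deciderResults) {
            Decision result = Decision.YES;
            for (Decision d : deciderResults) {
                if (d == Decision.NO) {
                    return Decision.NO;         // any NO wins immediately
                }
                if (d == Decision.THROTTLE) {
                    result = Decision.THROTTLE; // remember throttling, keep looking for a NO
                }
            }
            return result;
        }
    }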
@@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MappingLookup;
+import org.elasticsearch.index.mapper.MappingParserContext;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ProvidedIdFieldMapper;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
@@ -90,7 +91,7 @@ public enum IndexMode {
         }

         @Override
-        public MetadataFieldMapper timeSeriesIdFieldMapper() {
+        public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) {
             // non time-series indices must not have a TimeSeriesIdFieldMapper
             return null;
         }
@@ -187,8 +188,8 @@ public enum IndexMode {
         }

         @Override
-        public MetadataFieldMapper timeSeriesIdFieldMapper() {
-            return TimeSeriesIdFieldMapper.INSTANCE;
+        public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) {
+            return TimeSeriesIdFieldMapper.getInstance(c);
         }

         @Override
@@ -277,7 +278,7 @@ public enum IndexMode {
         }

         @Override
-        public MetadataFieldMapper timeSeriesIdFieldMapper() {
+        public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) {
             // non time-series indices must not have a TimeSeriesIdFieldMapper
             return null;
         }
@@ -348,7 +349,7 @@ public enum IndexMode {
         }

         @Override
-        public MetadataFieldMapper timeSeriesIdFieldMapper() {
+        public MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c) {
             // non time-series indices must not have a TimeSeriesIdFieldMapper
             return null;
         }
@@ -518,7 +519,7 @@ public enum IndexMode {
      * the _tsid field. The field mapper will be added to the list of the metadata
      * field mappers for the index.
      */
-    public abstract MetadataFieldMapper timeSeriesIdFieldMapper();
+    public abstract MetadataFieldMapper timeSeriesIdFieldMapper(MappingParserContext c);

     /**
      * Return an instance of the {@link TimeSeriesRoutingHashFieldMapper} that generates
@@ -147,6 +147,7 @@ public class IndexVersions {
     public static final IndexVersion UPGRADE_TO_LUCENE_10_1_0 = def(9_009_0_00, Version.LUCENE_10_1_0);
     public static final IndexVersion USE_SYNTHETIC_SOURCE_FOR_RECOVERY_BY_DEFAULT = def(9_010_00_0, Version.LUCENE_10_1_0);
     public static final IndexVersion TIMESTAMP_DOC_VALUES_SPARSE_INDEX = def(9_011_0_00, Version.LUCENE_10_1_0);
+    public static final IndexVersion TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX = def(9_012_0_00, Version.LUCENE_10_1_0);
     /*
      * STOP! READ THIS FIRST! No, really,
      *        ____ _____ ___  ____    _    ____  _____ _   _    ___  ____  _____ ___  ____  ____  _____    _
@@ -46,17 +46,36 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper {
     public static final String NAME = "_tsid";
     public static final String CONTENT_TYPE = "_tsid";
     public static final TimeSeriesIdFieldType FIELD_TYPE = new TimeSeriesIdFieldType();
-    public static final TimeSeriesIdFieldMapper INSTANCE = new TimeSeriesIdFieldMapper();
+    private static final TimeSeriesIdFieldMapper INSTANCE_WITHOUT_SKIPPER = new TimeSeriesIdFieldMapper(false);
+    private static final TimeSeriesIdFieldMapper INSTANCE_WITH_SKIPPER = new TimeSeriesIdFieldMapper(true);
+
+    public static TimeSeriesIdFieldMapper getInstance(boolean useDocValuesSkipper) {
+        if (useDocValuesSkipper) {
+            return INSTANCE_WITH_SKIPPER;
+        } else {
+            return INSTANCE_WITHOUT_SKIPPER;
+        }
+    }
+
+    public static TimeSeriesIdFieldMapper getInstance(MappingParserContext context) {
+        boolean useDocValuesSkipper = context.indexVersionCreated().onOrAfter(IndexVersions.TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX)
+            && context.getIndexSettings().useDocValuesSkipper();
+        return TimeSeriesIdFieldMapper.getInstance(useDocValuesSkipper);
+    }

     @Override
     public FieldMapper.Builder getMergeBuilder() {
-        return new Builder().init(this);
+        return new Builder(this.useDocValuesSkipper).init(this);
     }

     public static class Builder extends MetadataFieldMapper.Builder {

-        protected Builder() {
+        private final boolean useDocValuesSkipper;
+
+        protected Builder(boolean useDocValuesSkipper) {
             super(NAME);
+            this.useDocValuesSkipper = useDocValuesSkipper;
         }

         @Override
@@ -66,11 +85,11 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper {

         @Override
         public TimeSeriesIdFieldMapper build() {
-            return INSTANCE;
+            return TimeSeriesIdFieldMapper.getInstance(useDocValuesSkipper);
         }
     }

-    public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesIdFieldMapper());
+    public static final TypeParser PARSER = new FixedTypeParser(c -> c.getIndexSettings().getMode().timeSeriesIdFieldMapper(c));

     public static final class TimeSeriesIdFieldType extends MappedFieldType {
         private TimeSeriesIdFieldType() {
@@ -115,8 +134,11 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper {
         }
     }

-    private TimeSeriesIdFieldMapper() {
+    private final boolean useDocValuesSkipper;
+
+    private TimeSeriesIdFieldMapper(boolean useDocValuesSkipper) {
         super(FIELD_TYPE);
+        this.useDocValuesSkipper = useDocValuesSkipper;
     }

     @Override
@@ -135,7 +157,12 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper {
         } else {
             timeSeriesId = routingPathFields.buildHash().toBytesRef();
         }
+
+        if (this.useDocValuesSkipper) {
+            context.doc().add(SortedDocValuesField.indexedField(fieldType().name(), timeSeriesId));
+        } else {
             context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId));
+        }

         BytesRef uidEncoded = TsidExtractingIdFieldMapper.createField(
             context,
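The mapper now keys two singletons off a single boolean: whether the _tsid doc-values field should also carry the sparse doc-values skipper index, which in the diff depends on the index creation version and an index setting. A condensed sketch of that selection, with the version and settings inputs reduced to plain booleans (hypothetical class name, not the real mapper):

    // Illustrative sketch only; the real check reads IndexVersions and index settings.
    final class TsidMapperSelectionSketch {
        private static final TsidMapperSelectionSketch WITHOUT_SKIPPER = new TsidMapperSelectionSketch(false);
        private static final TsidMapperSelectionSketch WITH_SKIPPER = new TsidMapperSelectionSketch(true);

        private final boolean useDocValuesSkipper;

        private TsidMapperSelectionSketch(boolean useDocValuesSkipper) {
            this.useDocValuesSkipper = useDocValuesSkipper;
        }

        // Both conditions must hold before the skipper-enabled instance is handed out.
        static TsidMapperSelectionSketch getInstance(boolean createdOnOrAfterSparseIndexVersion, boolean skipperEnabledInSettings) {
            boolean useDocValuesSkipper = createdOnOrAfterSparseIndexVersion && skipperEnabledInSettings;
            return useDocValuesSkipper ? WITH_SKIPPER : WITHOUT_SKIPPER;
        }

        boolean usesDocValuesSkipper() {
            return useDocValuesSkipper;
        }
    }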
@@ -112,6 +112,8 @@ import static org.mockito.Mockito.when;
 
 public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
 
+    private static AtomicReference<DesiredBalanceMetrics.AllocationStats> ALLOCATION_STATS_PLACEHOLDER = new AtomicReference<>();
+
     public void testNoChangesOnEmptyDesiredBalance() {
         final var clusterState = DesiredBalanceComputerTests.createInitialClusterState(3);
         final var routingAllocation = createRoutingAllocationFrom(clusterState);
@@ -252,8 +254,9 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             (indexName, nodeId) -> indexName.equals("index-0") && nodeId.equals("node-0")
         );
 
+        AtomicReference<DesiredBalanceMetrics.AllocationStats> allocationStats = new AtomicReference<>();
         final var allocationService = createTestAllocationService(
-            routingAllocation -> reconcile(routingAllocation, desiredBalance),
+            routingAllocation -> reconcile(routingAllocation, desiredBalance, allocationStats),
             new SameShardAllocationDecider(clusterSettings),
             new ReplicaAfterPrimaryActiveAllocationDecider(),
             new ThrottlingAllocationDecider(clusterSettings),
@@ -277,6 +280,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             final var index1RoutingTable = stateWithStartedPrimary.routingTable(project1).shardRoutingTable("index-1", 0);
             assertTrue(index1RoutingTable.primaryShard().unassigned());
             assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned));
+            assertNotNull(allocationStats.get());
+            assertEquals(new DesiredBalanceMetrics.AllocationStats(3, 1, 0), allocationStats.get());
         }
 
         // now relax the filter so that the replica of index-0 and the primary of index-1 can both be assigned to node-1, but the throttle
@@ -290,6 +295,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             final var index1RoutingTable = stateWithInitializingSecondPrimary.routingTable(project1).shardRoutingTable("index-1", 0);
             assertTrue(index1RoutingTable.primaryShard().initializing());
             assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned));
+            assertNotNull(allocationStats.get());
+            assertEquals(new DesiredBalanceMetrics.AllocationStats(2, 2, 0), allocationStats.get());
         }
 
         final var stateWithStartedPrimariesAndInitializingReplica = startInitializingShardsAndReroute(
@@ -305,6 +312,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
                 .shardRoutingTable("index-1", 0);
             assertTrue(index1RoutingTable.primaryShard().started());
             assertTrue(index1RoutingTable.replicaShards().stream().allMatch(ShardRouting::unassigned));
+            assertNotNull(allocationStats.get());
+            assertEquals(new DesiredBalanceMetrics.AllocationStats(1, 3, 0), allocationStats.get());
         }
     }
 
@@ -834,6 +843,9 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
     }
 
     public void testMoveShards() {
+        /**
+         * Set up 4 nodes and an index of 3 shards with 1 replica each (6 shard copies).
+         */
         final var discoveryNodes = discoveryNodes(4);
         final var metadata = Metadata.builder();
         final var routingTable = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY);
@@ -858,11 +870,13 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             .build();
         final var clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
 
+        // Set up overriding AllocationDecider#canAllocate decisions for a shard.
         final var canAllocateRef = new AtomicReference<>(Decision.YES);
 
         final var desiredBalance = new AtomicReference<>(desiredBalance(clusterState, (shardId, nodeId) -> true));
+        AtomicReference<DesiredBalanceMetrics.AllocationStats> allocationStats = new AtomicReference<>();
         final var allocationService = createTestAllocationService(
-            routingAllocation -> reconcile(routingAllocation, desiredBalance.get()),
+            routingAllocation -> reconcile(routingAllocation, desiredBalance.get(), allocationStats),
             new SameShardAllocationDecider(clusterSettings),
             new ReplicaAfterPrimaryActiveAllocationDecider(),
             new ThrottlingAllocationDecider(clusterSettings),
@@ -892,7 +906,10 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             assertTrue(shardRouting.started());
             assertThat(shardRouting.currentNodeId(), oneOf("node-0", "node-1"));
         }
+        assertNotNull(allocationStats);
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get());
 
+        // Only allow allocation on two of the nodes, excluding the other two nodes.
         clusterSettings.applySettings(
             Settings.builder()
                 .putList(
@@ -905,6 +922,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
 
         assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // all still on desired nodes, no
         // movement needed
+        assertNotNull(allocationStats);
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get());
 
         desiredBalance.set(desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-2") || nodeId.equals("node-3")));
 
@@ -913,10 +932,14 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
         final var reroutedState = allocationService.reroute(clusterState, "test", ActionListener.noop());
         assertThat(reroutedState.getRoutingNodes().node("node-0").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1));
         assertThat(reroutedState.getRoutingNodes().node("node-1").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1));
+        assertNotNull(allocationStats);
+        // Total allocations counts relocating and intializing shards, so the two relocating shards will be counted twice.
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 8, 4), allocationStats.get());
 
         // Ensuring that we check the shortcut two-param canAllocate() method up front
         canAllocateRef.set(Decision.NO);
         assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop()));
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get());
         canAllocateRef.set(Decision.YES);
 
         // Restore filter to default
@@ -954,6 +977,8 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             "test",
             ActionListener.noop()
         );
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 7, 3), allocationStats.get());
 
         assertThat(shuttingDownState.getRoutingNodes().node("node-2").numberOfShardsWithState(ShardRoutingState.INITIALIZING), equalTo(1));
     }
 
@@ -982,11 +1007,13 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
         final var desiredBalance = new AtomicReference<>(
             desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-0") || nodeId.equals("node-1"))
         );
+        AtomicReference<DesiredBalanceMetrics.AllocationStats> allocationStats = new AtomicReference<>();
         final var allocationService = createTestAllocationService(
-            routingAllocation -> reconcile(routingAllocation, desiredBalance.get()),
+            routingAllocation -> reconcile(routingAllocation, desiredBalance.get(), allocationStats),
             new SameShardAllocationDecider(clusterSettings),
             new ReplicaAfterPrimaryActiveAllocationDecider(),
             new ThrottlingAllocationDecider(clusterSettings),
+            new ConcurrentRebalanceAllocationDecider(clusterSettings),
             new AllocationDecider() {
                 @Override
                 public Decision canRebalance(RoutingAllocation allocation) {
@@ -1016,24 +1043,28 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             assertThat(shardRouting.currentNodeId(), oneOf("node-0", "node-1"));
         }
 
-        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // all still on desired nodes, no
-        // movement needed
+        // All still on desired nodes, no movement needed, cluster state remains the same.
+        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop()));
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 0), allocationStats.get());
 
         desiredBalance.set(desiredBalance(clusterState, (shardId, nodeId) -> nodeId.equals("node-2") || nodeId.equals("node-3")));
 
         canRebalanceGlobalRef.set(Decision.NO);
-        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // rebalancing forbidden on all
-        // shards, no movement
+        // rebalancing forbidden on all shards, no movement allowed, cluster state remains the same.
+        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop()));
+        // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get());
         canRebalanceGlobalRef.set(Decision.YES);
 
         canRebalanceShardRef.set(Decision.NO);
-        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // rebalancing forbidden on
-        // specific shards, no movement
+        // rebalancing forbidden on specific shards, still no movement.
+        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop()));
+        // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get());
        canRebalanceShardRef.set(Decision.YES);
 
         canAllocateShardRef.set(Decision.NO);
-        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop())); // allocation not possible, no
-        // movement
+        // allocation not possible, no movement
+        assertSame(clusterState, allocationService.reroute(clusterState, "test", ActionListener.noop()));
+        // assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get());
         canAllocateShardRef.set(Decision.YES);
 
         // The next reroute starts moving shards to node-2 and node-3, but interleaves the decisions between node-0 and node-1 for fairness.
@@ -1041,6 +1072,16 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
         final var reroutedState = allocationService.reroute(clusterState, "test", ActionListener.noop());
         assertThat(reroutedState.getRoutingNodes().node("node-0").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1));
         assertThat(reroutedState.getRoutingNodes().node("node-1").numberOfShardsWithState(ShardRoutingState.RELOCATING), equalTo(1));
+        assertNotNull(allocationStats.get());
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 6, 6), allocationStats.get());
+
+        // Test that the AllocationStats are still updated, even though throttling is active. The cluster state should remain unchanged
+        // because due to throttling: the previous reroute request started relocating two shards and, since those reallocations have not
+        // been completed, no additional shard relocations can begin.
+        assertSame(reroutedState, allocationService.reroute(reroutedState, "test", ActionListener.noop()));
+        assertNotNull(allocationStats);
+        // Note: total allocations counts relocating and intializing shards, so the two relocating shards will be counted twice.
+        assertEquals(new DesiredBalanceMetrics.AllocationStats(0, 8, 4), allocationStats.get());
     }
 
     public void testDoNotRebalanceToTheNodeThatNoLongerExists() {
@@ -1245,12 +1286,14 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
         while (true) {
 
             var allocation = createRoutingAllocationFrom(clusterState, deciders);
-            reconciler.reconcile(balance, allocation);
+            var allocationStats = reconciler.reconcile(balance, allocation);
 
             var initializing = shardsWithState(allocation.routingNodes(), ShardRoutingState.INITIALIZING);
             if (initializing.isEmpty()) {
+                assertEquals(new DesiredBalanceMetrics.AllocationStats(0, shardsPerNode * numberOfNodes, 0), allocationStats);
                 break;
             }
 
             for (ShardRouting shardRouting : initializing) {
                 totalOutgoingMoves.get(shardRouting.relocatingNodeId()).incrementAndGet();
                 allocation.routingNodes().startShard(shardRouting, allocation.changes(), 0L);
@@ -1366,11 +1409,24 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
     }
 
     private static void reconcile(RoutingAllocation routingAllocation, DesiredBalance desiredBalance) {
-        final var threadPool = mock(ThreadPool.class);
-        when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet);
-        new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool).reconcile(desiredBalance, routingAllocation);
+        reconcile(routingAllocation, desiredBalance, ALLOCATION_STATS_PLACEHOLDER);
+    }
+
+    private static void reconcile(
+        RoutingAllocation routingAllocation,
+        DesiredBalance desiredBalance,
+        AtomicReference<DesiredBalanceMetrics.AllocationStats> allocationStatsAtomicReference
+    ) {
+        final var threadPool = mock(ThreadPool.class);
+        when(threadPool.relativeTimeInMillisSupplier()).thenReturn(new AtomicLong()::incrementAndGet);
+        allocationStatsAtomicReference.set(
+            new DesiredBalanceReconciler(createBuiltInClusterSettings(), threadPool).reconcile(desiredBalance, routingAllocation)
+        );
     }
 
+    /**
+     * Returns whether the node's shards are all desired assignments.
+     */
     private static boolean isReconciled(RoutingNode node, DesiredBalance balance) {
         for (ShardRouting shardRouting : node) {
             if (balance.assignments().get(shardRouting.shardId()).nodeIds().contains(node.nodeId()) == false) {
@@ -1512,6 +1568,10 @@ public class DesiredBalanceReconcilerTests extends ESAllocationTestCase {
             .build();
     }
 
+    /**
+     * Settings that limit concurrency on each node to: a single primary shard recovery from local disk; a single shard move as a source
+     * node; a single shard move as the destination node.
+     */
     private static Settings throttleSettings() {
         return Settings.builder()
             .put(CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), 1)
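The test changes above thread an AtomicReference through the reconcile(...) helper so each reroute leaves its AllocationStats behind for assertions. A hedged, self-contained sketch of that capture idiom follows; the AllocationStats record here is a stand-in, and its field names are assumptions rather than the real DesiredBalanceMetrics.AllocationStats definition.

import java.util.concurrent.atomic.AtomicReference;

// Self-contained sketch of the capture pattern used by the new reconcile(..., AtomicReference)
// helper. AllocationStats here is a stand-in record; the production code would instead store
// the value returned by DesiredBalanceReconciler.reconcile.
public class AllocationStatsCaptureSketch {

    record AllocationStats(long unassignedShards, long totalAllocations, long undesiredAllocations) {}

    // Pretend "reconciler": returns the stats of the last reconciliation round.
    static AllocationStats reconcileOnce() {
        return new AllocationStats(0, 8, 4);
    }

    public static void main(String[] args) {
        AtomicReference<AllocationStats> allocationStats = new AtomicReference<>();

        // The test wires a lambda like this into the allocation service; every reroute refreshes the stats.
        Runnable reconcile = () -> allocationStats.set(reconcileOnce());

        reconcile.run();
        if (!new AllocationStats(0, 8, 4).equals(allocationStats.get())) {
            throw new AssertionError("unexpected stats: " + allocationStats.get());
        }
        System.out.println("captured " + allocationStats.get());
    }
}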
27  test/fixtures/hdfs-fixture/build.gradle  (vendored)

@@ -13,13 +13,36 @@ apply plugin: 'com.gradleup.shadow'
 
 import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
 
+def patched = Attribute.of('patched', Boolean)
 configurations {
-  hdfs2
-  hdfs3
+  hdfs2 {
+    attributes {
+      attribute(patched, true)
+    }
+  }
+  hdfs3 {
+    attributes {
+      attribute(patched, true)
+    }
+  }
   consumable("shadowedHdfs2")
 }
 
 dependencies {
+  attributesSchema {
+    attribute(patched)
+  }
+  artifactTypes.getByName("jar") {
+    attributes.attribute(patched, false)
+  }
+  registerTransform(org.elasticsearch.gradle.internal.dependencies.patches.hdfs.HdfsClassPatcher) {
+    from.attribute(patched, false)
+    to.attribute(patched, true)
+    parameters {
+      matchingArtifacts = ["hadoop-common"]
+    }
+  }
+
   compileOnly("org.apache.hadoop:hadoop-minicluster:2.8.5")
   api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}") {
     transitive = false
@@ -1,5 +1,5 @@
 org.elasticsearch.blobcache:
   - files:
-      - relative_path: "shared_snapshot_cache"
-        relative_to: "data"
-        mode: "read_write"
+      - relative_path: ""
+        relative_to: data
+        mode: read_write
@@ -49,10 +49,15 @@ public class DeprecatedIndexPredicate {
      */
     public static boolean reindexRequired(IndexMetadata indexMetadata, boolean filterToBlockedStatus) {
         return creationVersionBeforeMinimumWritableVersion(indexMetadata)
+            && isNotSystem(indexMetadata)
             && isNotSearchableSnapshot(indexMetadata)
             && matchBlockedStatus(indexMetadata, filterToBlockedStatus);
     }
 
+    private static boolean isNotSystem(IndexMetadata indexMetadata) {
+        return indexMetadata.isSystem() == false;
+    }
+
     private static boolean isNotSearchableSnapshot(IndexMetadata indexMetadata) {
         return indexMetadata.isSearchableSnapshot() == false;
     }
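The new isNotSystem guard above short-circuits reindexRequired for system indices in the same style as the existing searchable-snapshot check. A minimal, hypothetical sketch of that guard composition (IndexInfo and its accessors are stand-ins, not the real IndexMetadata API):

// Hypothetical sketch of the guard-composition style used by reindexRequired above.
public class ReindexPredicateSketch {

    record IndexInfo(boolean system, boolean searchableSnapshot, boolean tooOldToWrite, boolean blocked) {}

    static boolean reindexRequired(IndexInfo index, boolean filterToBlockedStatus) {
        return index.tooOldToWrite()
            && isNotSystem(index)                       // new guard: system indices are skipped
            && isNotSearchableSnapshot(index)
            && index.blocked() == filterToBlockedStatus;
    }

    private static boolean isNotSystem(IndexInfo index) {
        return index.system() == false;
    }

    private static boolean isNotSearchableSnapshot(IndexInfo index) {
        return index.searchableSnapshot() == false;
    }

    public static void main(String[] args) {
        // An old system index no longer reports that it requires reindexing.
        System.out.println(reindexRequired(new IndexInfo(true, false, true, false), false));  // false
        // An old regular index still does.
        System.out.println(reindexRequired(new IndexInfo(false, false, true, false), false)); // true
    }
}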
@@ -1,3 +1,13 @@
+org.elasticsearch.xcore:
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
+org.elasticsearch.sslconfig:
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
 org.apache.httpcomponents.httpclient:
   - outbound_network # For SamlRealm
   - manage_threads
@@ -72,6 +72,7 @@ public class DataStreamDeprecationChecker implements ResourceDeprecationChecker
         Map<String, List<DeprecationIssue>> dataStreamIssues = new HashMap<>();
         for (String dataStreamName : dataStreamNames) {
             DataStream dataStream = clusterState.metadata().getProject().dataStreams().get(dataStreamName);
+            if (dataStream.isSystem() == false) {
                 List<DeprecationIssue> issuesForSingleDataStream = DATA_STREAM_CHECKS.stream()
                     .map(c -> c.apply(dataStream, clusterState))
                     .filter(Objects::nonNull)
@@ -80,6 +81,7 @@ public class DataStreamDeprecationChecker implements ResourceDeprecationChecker
                     dataStreamIssues.put(dataStreamName, issuesForSingleDataStream);
                 }
             }
+        }
         return dataStreamIssues.isEmpty() ? Map.of() : dataStreamIssues;
     }
 
@@ -302,4 +302,52 @@ public class DataStreamDeprecationCheckerTests extends ESTestCase {
         assertThat(issuesByDataStream.get(dataStream.getName()), equalTo(List.of(expected)));
     }
 
+    public void testOldSystemDataStreamIgnored() {
+        // We do not want system data streams coming back in the deprecation info API
+        int oldIndexCount = randomIntBetween(1, 100);
+        int newIndexCount = randomIntBetween(1, 100);
+        List<Index> allIndices = new ArrayList<>();
+        Map<String, IndexMetadata> nameToIndexMetadata = new HashMap<>();
+        for (int i = 0; i < oldIndexCount; i++) {
+            Settings.Builder settings = settings(IndexVersion.fromId(7170099));
+
+            String indexName = "old-data-stream-index-" + i;
+            settings.put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true);
+
+            IndexMetadata oldIndexMetadata = IndexMetadata.builder(indexName)
+                .settings(settings)
+                .numberOfShards(1)
+                .numberOfReplicas(0)
+                .build();
+            allIndices.add(oldIndexMetadata.getIndex());
+            nameToIndexMetadata.put(oldIndexMetadata.getIndex().getName(), oldIndexMetadata);
+        }
+        for (int i = 0; i < newIndexCount; i++) {
+            Index newIndex = createNewIndex(i, false, nameToIndexMetadata);
+            allIndices.add(newIndex);
+        }
+        DataStream dataStream = new DataStream(
+            randomAlphaOfLength(10),
+            allIndices,
+            randomNegativeLong(),
+            Map.of(),
+            true,
+            false,
+            true,
+            randomBoolean(),
+            randomFrom(IndexMode.values()),
+            null,
+            randomFrom(DataStreamOptions.EMPTY, DataStreamOptions.FAILURE_STORE_DISABLED, DataStreamOptions.FAILURE_STORE_ENABLED, null),
+            List.of(),
+            randomBoolean(),
+            null
+        );
+        Metadata metadata = Metadata.builder()
+            .indices(nameToIndexMetadata)
+            .dataStreams(Map.of(dataStream.getName(), dataStream), Map.of())
+            .build();
+        ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
+        assertThat(checker.check(clusterState), equalTo(Map.of()));
+    }
+
 }
@@ -298,6 +298,28 @@ public class IndexDeprecationCheckerTests extends ESTestCase {
         assertEquals(List.of(expected), issuesByIndex.get("test"));
     }
 
+    public void testOldSystemIndicesIgnored() {
+        // We do not want system indices coming back in the deprecation info API
+        Settings.Builder settings = settings(OLD_VERSION).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true);
+        IndexMetadata indexMetadata = IndexMetadata.builder("test")
+            .system(true)
+            .settings(settings)
+            .numberOfShards(1)
+            .numberOfReplicas(0)
+            .state(indexMetdataState)
+            .build();
+        ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE)
+            .metadata(Metadata.builder().put(indexMetadata, true))
+            .blocks(clusterBlocksForIndices(indexMetadata))
+            .build();
+        Map<String, List<DeprecationIssue>> issuesByIndex = checker.check(
+            clusterState,
+            new DeprecationInfoAction.Request(TimeValue.THIRTY_SECONDS),
+            emptyPrecomputedData
+        );
+        assertThat(issuesByIndex, equalTo(Map.of()));
+    }
+
     private IndexMetadata readonlyIndexMetadata(String indexName, IndexVersion indexVersion) {
         Settings.Builder settings = settings(indexVersion).put(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey(), true);
         return IndexMetadata.builder(indexName).settings(settings).numberOfShards(1).numberOfReplicas(0).state(indexMetdataState).build();
@@ -3,3 +3,7 @@ ALL-UNNAMED:
   - write_system_properties:
       properties:
         - org.apache.xml.security.ignoreLineBreaks
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
@@ -54,6 +54,7 @@ import org.elasticsearch.xpack.inference.services.elastic.authorization.ElasticI
 import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionModel;
 import org.elasticsearch.xpack.inference.services.elastic.completion.ElasticInferenceServiceCompletionServiceSettings;
 import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings;
+import org.elasticsearch.xpack.inference.services.validation.ModelValidatorBuilder;
 import org.elasticsearch.xpack.inference.telemetry.TraceContext;
 
 import java.util.ArrayList;
@@ -557,11 +558,8 @@ public class ElasticInferenceService extends SenderService {
 
     @Override
     public void checkModelConfig(Model model, ActionListener<Model> listener) {
-        if (model instanceof ElasticInferenceServiceSparseEmbeddingsModel embeddingsModel) {
-            listener.onResponse(updateModelWithEmbeddingDetails(embeddingsModel));
-        } else {
-            listener.onResponse(model);
-        }
+        // TODO: Remove this function once all services have been updated to use the new model validators
+        ModelValidatorBuilder.buildModelValidator(model.getTaskType()).validate(this, model, listener);
     }
 
     private static List<ChunkedInference> translateToChunkedResults(InferenceInputs inputs, InferenceServiceResults inferenceResults) {
@@ -576,18 +574,6 @@ public class ElasticInferenceService extends SenderService {
         }
     }
 
-    private ElasticInferenceServiceSparseEmbeddingsModel updateModelWithEmbeddingDetails(
-        ElasticInferenceServiceSparseEmbeddingsModel model
-    ) {
-        ElasticInferenceServiceSparseEmbeddingsServiceSettings serviceSettings = new ElasticInferenceServiceSparseEmbeddingsServiceSettings(
-            model.getServiceSettings().modelId(),
-            model.getServiceSettings().maxInputTokens(),
-            model.getServiceSettings().rateLimitSettings()
-        );
-
-        return new ElasticInferenceServiceSparseEmbeddingsModel(model, serviceSettings);
-    }
-
     private TraceContext getCurrentTraceInfo() {
         var threadPool = getServiceComponents().threadPool();
 
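The checkModelConfig change above replaces per-model-class branching with a validator chosen from the model's task type. A hedged, self-contained sketch of that delegation shape follows; the enum, interface, and factory below are stand-ins, not the real ModelValidatorBuilder API.

// Illustrative stand-ins only: how a buildModelValidator(taskType)-style factory might pick
// a validation strategy per task type, instead of instanceof checks on model classes.
public class ModelValidatorBuilderSketch {

    enum TaskType { SPARSE_EMBEDDING, COMPLETION }

    interface ModelValidator {
        void validate(String modelId);
    }

    static ModelValidator buildModelValidator(TaskType taskType) {
        // One strategy per task type; callers no longer special-case concrete model classes.
        return switch (taskType) {
            case SPARSE_EMBEDDING -> modelId -> System.out.println("validating sparse embedding model " + modelId);
            case COMPLETION -> modelId -> System.out.println("validating completion model " + modelId);
        };
    }

    public static void main(String[] args) {
        buildModelValidator(TaskType.SPARSE_EMBEDDING).validate("my-model-id");
        buildModelValidator(TaskType.COMPLETION).validate("my-model-id");
    }
}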
@@ -35,7 +35,6 @@ public class ElasticInferenceServiceCompletionServiceSettings extends FilteredXC
 
     public static final String NAME = "elastic_inference_service_completion_service_settings";
 
-    // TODO what value do we put here?
     private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(720L);
 
     public static ElasticInferenceServiceCompletionServiceSettings fromMap(Map<String, Object> map, ConfigurationParseContext context) {
@@ -317,7 +317,21 @@ public class ElasticInferenceServiceTests extends ESTestCase {
 
     public void testCheckModelConfig_ReturnsNewModelReference() throws IOException {
         var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager);
 
         try (var service = createService(senderFactory, getUrl(webServer))) {
+            String responseJson = """
+                {
+                    "data": [
+                        {
+                            "hello": 2.1259406,
+                            "greet": 1.7073475
+                        }
+                    ]
+                }
+                """;
+
+            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
+
             var model = ElasticInferenceServiceSparseEmbeddingsModelTests.createModel(getUrl(webServer), "my-model-id");
             PlainActionFuture<Model> listener = new PlainActionFuture<>();
             service.checkModelConfig(model, listener);
@@ -174,7 +174,7 @@ public class GetMigrationReindexStatusTransportAction extends HandledTransportAc
             if (sourceIndexStats == null) {
                 totalDocsInIndex = 0;
             } else {
-                DocsStats totalDocsStats = sourceIndexStats.getTotal().getDocs();
+                DocsStats totalDocsStats = sourceIndexStats.getPrimaries().getDocs();
                 totalDocsInIndex = totalDocsStats == null ? 0 : totalDocsStats.getCount();
             }
             IndexStats migratedIndexStats = indicesStatsResponse.getIndex(
@@ -184,7 +184,7 @@ public class GetMigrationReindexStatusTransportAction extends HandledTransportAc
             if (migratedIndexStats == null) {
                 reindexedDocsInIndex = 0;
             } else {
-                DocsStats reindexedDocsStats = migratedIndexStats.getTotal().getDocs();
+                DocsStats reindexedDocsStats = migratedIndexStats.getPrimaries().getDocs();
                 reindexedDocsInIndex = reindexedDocsStats == null ? 0 : reindexedDocsStats.getCount();
             }
             inProgressMap.put(index, Tuple.tuple(totalDocsInIndex, reindexedDocsInIndex));
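The switch from getTotal() to getPrimaries() above changes which document counts feed the migration progress numbers. As a hedged illustration with plain arithmetic and no Elasticsearch APIs, assuming one replica per shard: total-level doc stats count each document once per shard copy, so mixing them with primaries-only counts would skew the reported progress.

// Standalone arithmetic sketch (assumptions: 1 replica, counts are per shard copy) showing how
// total-level doc stats can distort a reindexing progress ratio compared to primaries-only stats.
public class ReindexProgressSketch {

    public static void main(String[] args) {
        long primaryDocs = 1_000;                            // documents in the source index, primaries only
        int replicaCopies = 1;                               // one replica per shard
        long totalDocs = primaryDocs * (1 + replicaCopies);  // what a total-level stat would report

        long reindexedPrimaryDocs = 500;                     // documents reindexed so far, primaries only

        System.out.printf("primaries-based progress: %.0f%%%n", 100.0 * reindexedPrimaryDocs / primaryDocs);
        System.out.printf("mixed totals-based progress: %.0f%%%n", 100.0 * reindexedPrimaryDocs / totalDocs);
    }
}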
@@ -1,2 +1,9 @@
 org.elasticsearch.ml:
   - manage_threads
+  - files:
+      - relative_path: mlmodel.conf
+        relative_to: config
+        mode: read
+      - relative_path: "ml-local-data/"
+        relative_to: data
+        mode: read_write
@@ -0,0 +1,8 @@
+org.elasticsearch.searchablesnapshots:
+  - files:
+      - relative_path: snapshot_cache
+        relative_to: data
+        mode: read_write
+      - relative_path: indices
+        relative_to: data
+        mode: read_write
@@ -1,5 +1,9 @@
 org.elasticsearch.security:
   - set_https_connection_properties # for CommandLineHttpClient
+  - files:
+      - relative_path: ""
+        relative_to: config
+        mode: read
 io.netty.transport:
   - manage_threads
   - inbound_network
@@ -13,6 +17,8 @@ io.netty.common:
         mode: "read"
       - path: "/usr/lib/os-release"
         mode: "read"
+      - path: "/proc/sys/net/core/somaxconn"
+        mode: read
 org.opensaml.xmlsec.impl:
   - write_system_properties:
       properties: