Merge revision 5c00341c2b into multi-project

This commit is contained in:
Tim Vernum 2025-02-14 17:17:41 +11:00
commit 680e7a6979
315 changed files with 11381 additions and 4339 deletions

View file

@ -261,7 +261,8 @@ public class ValuesSourceReaderBenchmark {
null, null,
false, false,
null, null,
null null,
false
).blockLoader(null); ).blockLoader(null);
} }

View file

@ -83,7 +83,7 @@ public class ScriptScoreBenchmark {
private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList());
private final Map<String, MappedFieldType> fieldTypes = Map.ofEntries( private final Map<String, MappedFieldType> fieldTypes = Map.ofEntries(
Map.entry("n", new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null, null)) Map.entry("n", new NumberFieldType("n", NumberType.LONG, false, false, true, true, null, Map.of(), null, false, null, null, false))
); );
private final IndexFieldDataCache fieldDataCache = new IndexFieldDataCache.None(); private final IndexFieldDataCache fieldDataCache = new IndexFieldDataCache.None();
private final CircuitBreakerService breakerService = new NoneCircuitBreakerService(); private final CircuitBreakerService breakerService = new NoneCircuitBreakerService();

View file

@ -32,7 +32,9 @@ develocity {
// Automatically publish scans from Elasticsearch CI // Automatically publish scans from Elasticsearch CI
if (onCI) { if (onCI) {
publishing.onlyIf { true } publishing.onlyIf { true }
server = 'https://gradle-enterprise.elastic.co' if(server.isPresent() == false) {
server = 'https://gradle-enterprise.elastic.co'
}
} else if( server.isPresent() == false) { } else if( server.isPresent() == false) {
publishing.onlyIf { false } publishing.onlyIf { false }
} }

View file

@ -15,6 +15,7 @@ import com.github.javaparser.ast.NodeList;
import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration;
import com.github.javaparser.ast.body.FieldDeclaration; import com.github.javaparser.ast.body.FieldDeclaration;
import com.github.javaparser.ast.body.VariableDeclarator; import com.github.javaparser.ast.body.VariableDeclarator;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.expr.NameExpr; import com.github.javaparser.ast.expr.NameExpr;
import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter; import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@ -33,6 +34,7 @@ import java.util.NavigableMap;
import java.util.Objects; import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.function.Function;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -51,6 +53,8 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
private boolean setCurrent; private boolean setCurrent;
@Nullable @Nullable
private Version removeVersion; private Version removeVersion;
@Nullable
private String addTransportVersion;
@Inject @Inject
public UpdateVersionsTask(BuildLayout layout) { public UpdateVersionsTask(BuildLayout layout) {
@ -62,6 +66,11 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
this.addVersion = Version.fromString(version); this.addVersion = Version.fromString(version);
} }
@Option(option = "add-transport-version", description = "Specifies transport version to add")
public void addTransportVersion(String transportVersion) {
this.addTransportVersion = transportVersion;
}
@Option(option = "set-current", description = "Set the 'current' constant to the new version") @Option(option = "set-current", description = "Set the 'current' constant to the new version")
public void setCurrent(boolean setCurrent) { public void setCurrent(boolean setCurrent) {
this.setCurrent = setCurrent; this.setCurrent = setCurrent;
@ -87,15 +96,18 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
@TaskAction @TaskAction
public void executeTask() throws IOException { public void executeTask() throws IOException {
if (addVersion == null && removeVersion == null) { if (addVersion == null && removeVersion == null && addTransportVersion == null) {
throw new IllegalArgumentException("No versions to add or remove specified"); throw new IllegalArgumentException("No versions to add or remove specified");
} }
if (setCurrent && addVersion == null) { if (setCurrent && addVersion == null) {
throw new IllegalArgumentException("No new version added to set as the current version"); throw new IllegalArgumentException("No new version added to set as the current version");
} }
if (Objects.equals(addVersion, removeVersion)) { if (addVersion != null && removeVersion != null && Objects.equals(addVersion, removeVersion)) {
throw new IllegalArgumentException("Same version specified to add and remove"); throw new IllegalArgumentException("Same version specified to add and remove");
} }
if (addTransportVersion != null && addTransportVersion.split(":").length != 2) {
throw new IllegalArgumentException("Transport version specified must be in the format '<constant>:<version-id>'");
}
Path versionJava = rootDir.resolve(VERSION_FILE_PATH); Path versionJava = rootDir.resolve(VERSION_FILE_PATH);
CompilationUnit file = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionJava)); CompilationUnit file = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionJava));
@ -115,6 +127,18 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
modifiedFile = removed; modifiedFile = removed;
} }
} }
if (addTransportVersion != null) {
var constant = addTransportVersion.split(":")[0];
var versionId = Integer.parseInt(addTransportVersion.split(":")[1]);
LOGGER.lifecycle("Adding transport version constant [{}] with id [{}]", constant, versionId);
var transportVersionsFile = rootDir.resolve(TRANSPORT_VERSIONS_FILE_PATH);
var transportVersions = LexicalPreservingPrinter.setup(StaticJavaParser.parse(transportVersionsFile));
var modified = addTransportVersionConstant(transportVersions, constant, versionId);
if (modified.isPresent()) {
writeOutNewContents(transportVersionsFile, modified.get());
}
}
if (modifiedFile.isPresent()) { if (modifiedFile.isPresent()) {
writeOutNewContents(versionJava, modifiedFile.get()); writeOutNewContents(versionJava, modifiedFile.get());
@ -161,6 +185,51 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
return Optional.of(versionJava); return Optional.of(versionJava);
} }
@VisibleForTesting
static Optional<CompilationUnit> addTransportVersionConstant(CompilationUnit transportVersions, String constant, int versionId) {
ClassOrInterfaceDeclaration transportVersionsClass = transportVersions.getClassByName("TransportVersions").get();
if (transportVersionsClass.getFieldByName(constant).isPresent()) {
LOGGER.lifecycle("New transport version constant [{}] already present, skipping", constant);
return Optional.empty();
}
TreeMap<Integer, FieldDeclaration> versions = transportVersionsClass.getFields()
.stream()
.filter(f -> f.getElementType().asString().equals("TransportVersion"))
.filter(
f -> f.getVariables().stream().limit(1).allMatch(v -> v.getInitializer().filter(Expression::isMethodCallExpr).isPresent())
)
.filter(f -> f.getVariable(0).getInitializer().get().asMethodCallExpr().getNameAsString().endsWith("def"))
.collect(
Collectors.toMap(
f -> f.getVariable(0)
.getInitializer()
.get()
.asMethodCallExpr()
.getArgument(0)
.asIntegerLiteralExpr()
.asNumber()
.intValue(),
Function.identity(),
(f1, f2) -> {
throw new IllegalStateException("Duplicate version constant " + f1);
},
TreeMap::new
)
);
// find the version this should be inserted after
Map.Entry<Integer, FieldDeclaration> previousVersion = versions.lowerEntry(versionId);
if (previousVersion == null) {
throw new IllegalStateException(String.format("Could not find previous version to [%s]", versionId));
}
FieldDeclaration newTransportVersion = createNewTransportVersionConstant(previousVersion.getValue(), constant, versionId);
transportVersionsClass.getMembers().addAfter(newTransportVersion, previousVersion.getValue());
return Optional.of(transportVersions);
}
private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVersion, String newName, String newExpr) { private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVersion, String newName, String newExpr) {
return new FieldDeclaration( return new FieldDeclaration(
new NodeList<>(lastVersion.getModifiers()), new NodeList<>(lastVersion.getModifiers()),
@ -172,6 +241,29 @@ public class UpdateVersionsTask extends AbstractVersionsTask {
); );
} }
private static FieldDeclaration createNewTransportVersionConstant(FieldDeclaration lastVersion, String newName, int newId) {
return new FieldDeclaration(
new NodeList<>(lastVersion.getModifiers()),
new VariableDeclarator(
lastVersion.getCommonType(),
newName,
StaticJavaParser.parseExpression(String.format("def(%s)", formatTransportVersionId(newId)))
)
);
}
private static String formatTransportVersionId(int id) {
String idString = Integer.toString(id);
return new StringBuilder(idString.substring(idString.length() - 2, idString.length())).insert(0, "_")
.insert(0, idString.substring(idString.length() - 3, idString.length() - 2))
.insert(0, "_")
.insert(0, idString.substring(idString.length() - 6, idString.length() - 3))
.insert(0, "_")
.insert(0, idString.substring(0, idString.length() - 6))
.toString();
}
@VisibleForTesting @VisibleForTesting
static Optional<CompilationUnit> removeVersionConstant(CompilationUnit versionJava, Version version) { static Optional<CompilationUnit> removeVersionConstant(CompilationUnit versionJava, Version version) {
String removeFieldName = toVersionField(version); String removeFieldName = toVersionField(version);

View file

@ -239,6 +239,96 @@ public class UpdateVersionsTaskTests {
assertThat(field.isPresent(), is(false)); assertThat(field.isPresent(), is(false));
} }
@Test
public void addTransportVersion() throws Exception {
var transportVersions = """
public class TransportVersions {
public static final TransportVersion V_1_0_0 = def(1_000_0_00);
public static final TransportVersion V_1_1_0 = def(1_001_0_00);
public static final TransportVersion V_1_2_0 = def(1_002_0_00);
public static final TransportVersion V_1_2_1 = def(1_002_0_01);
public static final TransportVersion V_1_2_2 = def(1_002_0_02);
public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
}
""";
var expectedTransportVersions = """
public class TransportVersions {
public static final TransportVersion V_1_0_0 = def(1_000_0_00);
public static final TransportVersion V_1_1_0 = def(1_001_0_00);
public static final TransportVersion V_1_2_0 = def(1_002_0_00);
public static final TransportVersion V_1_2_1 = def(1_002_0_01);
public static final TransportVersion V_1_2_2 = def(1_002_0_02);
public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
public static final TransportVersion NEXT_TRANSPORT_VERSION = def(1_005_0_00);
public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
}
""";
var unit = StaticJavaParser.parse(transportVersions);
var result = UpdateVersionsTask.addTransportVersionConstant(unit, "NEXT_TRANSPORT_VERSION", 1_005_0_00);
assertThat(result.isPresent(), is(true));
assertThat(result.get(), hasToString(expectedTransportVersions));
}
@Test
public void addTransportVersionPatch() throws Exception {
var transportVersions = """
public class TransportVersions {
public static final TransportVersion V_1_0_0 = def(1_000_0_00);
public static final TransportVersion V_1_1_0 = def(1_001_0_00);
public static final TransportVersion V_1_2_0 = def(1_002_0_00);
public static final TransportVersion V_1_2_1 = def(1_002_0_01);
public static final TransportVersion V_1_2_2 = def(1_002_0_02);
public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
}
""";
var expectedTransportVersions = """
public class TransportVersions {
public static final TransportVersion V_1_0_0 = def(1_000_0_00);
public static final TransportVersion V_1_1_0 = def(1_001_0_00);
public static final TransportVersion V_1_2_0 = def(1_002_0_00);
public static final TransportVersion V_1_2_1 = def(1_002_0_01);
public static final TransportVersion V_1_2_2 = def(1_002_0_02);
public static final TransportVersion SOME_OTHER_VERSION = def(1_003_0_00);
public static final TransportVersion PATCH_TRANSPORT_VERSION = def(1_003_0_01);
public static final TransportVersion YET_ANOTHER_VERSION = def(1_004_0_00);
public static final TransportVersion MINIMUM_COMPATIBLE = V_1_0_0;
}
""";
var unit = StaticJavaParser.parse(transportVersions);
var result = UpdateVersionsTask.addTransportVersionConstant(unit, "PATCH_TRANSPORT_VERSION", 1_003_0_01);
assertThat(result.isPresent(), is(true));
assertThat(result.get(), hasToString(expectedTransportVersions));
}
private static Optional<FieldDeclaration> findFirstField(Node node, String name) { private static Optional<FieldDeclaration> findFirstField(Node node, String name) {
return node.findFirst(FieldDeclaration.class, f -> f.getVariable(0).getName().getIdentifier().equals(name)); return node.findFirst(FieldDeclaration.class, f -> f.getVariable(0).getName().getIdentifier().equals(name));
} }

View file

@ -38,6 +38,7 @@ import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.core.Tuple; import org.elasticsearch.core.Tuple;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.jdk.JarHell; import org.elasticsearch.jdk.JarHell;
import org.elasticsearch.jdk.RuntimeVersionFeature;
import org.elasticsearch.plugin.scanner.ClassReaders; import org.elasticsearch.plugin.scanner.ClassReaders;
import org.elasticsearch.plugin.scanner.NamedComponentScanner; import org.elasticsearch.plugin.scanner.NamedComponentScanner;
import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Platforms;
@ -922,10 +923,12 @@ public class InstallPluginAction implements Closeable {
*/ */
private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception { private PluginDescriptor installPlugin(InstallablePlugin descriptor, Path tmpRoot, List<Path> deleteOnFailure) throws Exception {
final PluginDescriptor info = loadPluginInfo(tmpRoot); final PluginDescriptor info = loadPluginInfo(tmpRoot);
PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir()); if (RuntimeVersionFeature.isSecurityManagerAvailable()) {
if (pluginPolicy != null) { PluginPolicyInfo pluginPolicy = PolicyUtil.getPluginPolicyInfo(tmpRoot, env.tmpDir());
Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir()); if (pluginPolicy != null) {
PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch); Set<String> permissions = PluginSecurity.getPermissionDescriptions(pluginPolicy, env.tmpDir());
PluginSecurity.confirmPolicyExceptions(terminal, permissions, batch);
}
} }
// Validate that the downloaded plugin's ID matches what we expect from the descriptor. The // Validate that the downloaded plugin's ID matches what we expect from the descriptor. The

View file

@ -0,0 +1,5 @@
pr: 119886
summary: Initial support for unmapped fields
area: ES|QL
type: feature
issues: []

View file

@ -0,0 +1,5 @@
pr: 121370
summary: Improve SLM Health Indicator to cover missing snapshot
area: ILM+SLM
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 122066
summary: Adding elser default endpoint for EIS
area: Machine Learning
type: enhancement
issues: []

View file

@ -0,0 +1,8 @@
pr: 122074
summary: If the Transform is configured to write to an alias as its destination index,
when the delete_dest_index parameter is set to true, then the Delete API will now
delete the write index backing the alias
area: Transform
type: bug
issues:
- 121913

View file

@ -0,0 +1,5 @@
pr: 122199
summary: Fix issues that prevents using search only snapshots for indices that use index sorting. This is includes Logsdb and time series indices.
area: Logs
type: bug
issues: []

View file

@ -0,0 +1,6 @@
pr: 122224
summary: Enable the use of nested field type with index.mode=time_series
area: Mapping
type: enhancement
issues:
- 120874

View file

@ -0,0 +1,5 @@
pr: 122257
summary: Revive inlinestats
area: ES|QL
type: bug
issues: []

View file

@ -0,0 +1,6 @@
pr: 122272
summary: "[Inference API] Rename `model_id` prop to model in EIS sparse inference\
\ request body"
area: Inference
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 122280
summary: Use `FallbackSyntheticSourceBlockLoader` for number fields
area: Mapping
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 122326
summary: System Index Migration Failure Results in a Non-Recoverable State
area: Infra/Core
type: bug
issues: []

View file

@ -0,0 +1,6 @@
pr: 122357
summary: Handle search timeout in `SuggestPhase`
area: Search
type: bug
issues:
- 122186

View file

@ -0,0 +1,5 @@
pr: 122365
summary: Fix handling of auto expand replicas for stateless indices
area: "Search"
type: bug
issues: []

View file

@ -0,0 +1,6 @@
pr: 122417
summary: Fix listener leak in exchange service
area: ES|QL
type: bug
issues:
- 122271

View file

@ -0,0 +1,5 @@
pr: 122425
summary: Fix synthetic source bug that would mishandle nested `dense_vector` fields
area: Mapping
type: bug
issues: []

View file

@ -0,0 +1,5 @@
pr: 122427
summary: Improve size limiting string message
area: Infra/Core
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 122496
summary: Deduplicate `IngestStats` and `IngestStats.Stats` identity records when deserializing
area: Ingest Node
type: bug
issues: []

View file

@ -23,6 +23,7 @@ public class TimeValue implements Comparable<TimeValue> {
public static final TimeValue MAX_VALUE = new TimeValue(Long.MAX_VALUE, TimeUnit.NANOSECONDS); public static final TimeValue MAX_VALUE = new TimeValue(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
public static final TimeValue THIRTY_SECONDS = new TimeValue(30, TimeUnit.SECONDS); public static final TimeValue THIRTY_SECONDS = new TimeValue(30, TimeUnit.SECONDS);
public static final TimeValue ONE_MINUTE = new TimeValue(1, TimeUnit.MINUTES); public static final TimeValue ONE_MINUTE = new TimeValue(1, TimeUnit.MINUTES);
public static final TimeValue ONE_HOUR = new TimeValue(1, TimeUnit.HOURS);
private static final long C0 = 1L; private static final long C0 = 1L;
private static final long C1 = C0 * 1000L; private static final long C1 = C0 * 1000L;

View file

@ -44,10 +44,11 @@ public class InstrumentationServiceImpl implements InstrumentationService {
return InstrumenterImpl.create(clazz, methods); return InstrumenterImpl.create(clazz, methods);
} }
@Override private interface CheckerMethodVisitor {
public Map<MethodKey, CheckMethod> lookupMethods(Class<?> checkerClass) throws IOException { void visit(Class<?> currentClass, int access, String checkerMethodName, String checkerMethodDescriptor);
Map<MethodKey, CheckMethod> methodsToInstrument = new HashMap<>(); }
private void visitClassAndSupers(Class<?> checkerClass, CheckerMethodVisitor checkerMethodVisitor) throws ClassNotFoundException {
Set<Class<?>> visitedClasses = new HashSet<>(); Set<Class<?>> visitedClasses = new HashSet<>();
ArrayDeque<Class<?>> classesToVisit = new ArrayDeque<>(Collections.singleton(checkerClass)); ArrayDeque<Class<?>> classesToVisit = new ArrayDeque<>(Collections.singleton(checkerClass));
while (classesToVisit.isEmpty() == false) { while (classesToVisit.isEmpty() == false) {
@ -57,52 +58,61 @@ public class InstrumentationServiceImpl implements InstrumentationService {
} }
visitedClasses.add(currentClass); visitedClasses.add(currentClass);
var classFileInfo = InstrumenterImpl.getClassFileInfo(currentClass); try {
ClassReader reader = new ClassReader(classFileInfo.bytecodes()); var classFileInfo = InstrumenterImpl.getClassFileInfo(currentClass);
ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { ClassReader reader = new ClassReader(classFileInfo.bytecodes());
ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) {
@Override @Override
public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) { public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
super.visit(version, access, name, signature, superName, interfaces); super.visit(version, access, name, signature, superName, interfaces);
try { try {
if (OBJECT_INTERNAL_NAME.equals(superName) == false) { if (OBJECT_INTERNAL_NAME.equals(superName) == false) {
classesToVisit.add(Class.forName(Type.getObjectType(superName).getClassName())); classesToVisit.add(Class.forName(Type.getObjectType(superName).getClassName()));
}
for (var interfaceName : interfaces) {
classesToVisit.add(Class.forName(Type.getObjectType(interfaceName).getClassName()));
}
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Cannot inspect checker class " + currentClass.getName(), e);
} }
for (var interfaceName : interfaces) {
classesToVisit.add(Class.forName(Type.getObjectType(interfaceName).getClassName()));
}
} catch (ClassNotFoundException e) {
throw new IllegalArgumentException("Cannot inspect checker class " + checkerClass.getName(), e);
} }
}
@Override @Override
public MethodVisitor visitMethod( public MethodVisitor visitMethod(
int access, int access,
String checkerMethodName, String checkerMethodName,
String checkerMethodDescriptor, String checkerMethodDescriptor,
String signature, String signature,
String[] exceptions String[] exceptions
) { ) {
var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions);
if (checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) { checkerMethodVisitor.visit(currentClass, access, checkerMethodName, checkerMethodDescriptor);
var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); return mv;
var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes);
var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList();
var checkMethod = new CheckMethod(
Type.getInternalName(currentClass),
checkerMethodName,
checkerParameterDescriptors
);
methodsToInstrument.putIfAbsent(methodToInstrument, checkMethod);
} }
return mv; };
} reader.accept(visitor, 0);
}; } catch (IOException e) {
reader.accept(visitor, 0); throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e);
}
} }
}
@Override
public Map<MethodKey, CheckMethod> lookupMethods(Class<?> checkerClass) throws ClassNotFoundException {
Map<MethodKey, CheckMethod> methodsToInstrument = new HashMap<>();
visitClassAndSupers(checkerClass, (currentClass, access, checkerMethodName, checkerMethodDescriptor) -> {
if (checkerMethodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)) {
var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor);
var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes);
var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList();
var checkMethod = new CheckMethod(Type.getInternalName(currentClass), checkerMethodName, checkerParameterDescriptors);
methodsToInstrument.putIfAbsent(methodToInstrument, checkMethod);
}
});
return methodsToInstrument; return methodsToInstrument;
} }
@ -110,14 +120,14 @@ public class InstrumentationServiceImpl implements InstrumentationService {
@Override @Override
public InstrumentationInfo lookupImplementationMethod( public InstrumentationInfo lookupImplementationMethod(
Class<?> targetSuperclass, Class<?> targetSuperclass,
String methodName, String targetMethodName,
Class<?> implementationClass, Class<?> implementationClass,
Class<?> checkerClass, Class<?> checkerClass,
String checkMethodName, String checkMethodName,
Class<?>... parameterTypes Class<?>... parameterTypes
) throws NoSuchMethodException, ClassNotFoundException { ) throws NoSuchMethodException, ClassNotFoundException {
var targetMethod = targetSuperclass.getDeclaredMethod(methodName, parameterTypes); var targetMethod = targetSuperclass.getDeclaredMethod(targetMethodName, parameterTypes);
var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes()); var implementationMethod = implementationClass.getMethod(targetMethod.getName(), targetMethod.getParameterTypes());
validateTargetMethod(implementationClass, targetMethod, implementationMethod); validateTargetMethod(implementationClass, targetMethod, implementationMethod);
@ -128,33 +138,15 @@ public class InstrumentationServiceImpl implements InstrumentationService {
CheckMethod[] checkMethod = new CheckMethod[1]; CheckMethod[] checkMethod = new CheckMethod[1];
try { visitClassAndSupers(checkerClass, (currentClass, access, methodName, methodDescriptor) -> {
InstrumenterImpl.ClassFileInfo classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); if (methodName.equals(checkMethodName)) {
ClassReader reader = new ClassReader(classFileInfo.bytecodes()); var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor);
ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) {
@Override var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList();
public MethodVisitor visitMethod( checkMethod[0] = new CheckMethod(Type.getInternalName(currentClass), methodName, checkerParameterDescriptors);
int access,
String methodName,
String methodDescriptor,
String signature,
String[] exceptions
) {
var mv = super.visitMethod(access, methodName, methodDescriptor, signature, exceptions);
if (methodName.equals(checkMethodName)) {
var methodArgumentTypes = Type.getArgumentTypes(methodDescriptor);
if (Arrays.equals(methodArgumentTypes, checkMethodArgumentTypes)) {
var checkerParameterDescriptors = Arrays.stream(methodArgumentTypes).map(Type::getDescriptor).toList();
checkMethod[0] = new CheckMethod(Type.getInternalName(checkerClass), methodName, checkerParameterDescriptors);
}
}
return mv;
} }
}; }
reader.accept(visitor, 0); });
} catch (IOException e) {
throw new ClassNotFoundException("Cannot find a definition for class [" + checkerClass.getName() + "]", e);
}
if (checkMethod[0] == null) { if (checkMethod[0] == null) {
throw new NoSuchMethodException( throw new NoSuchMethodException(

View file

@ -152,14 +152,13 @@ public class InstrumenterImpl implements Instrumenter {
if (isAnnotationPresent == false) { if (isAnnotationPresent == false) {
boolean isStatic = (access & ACC_STATIC) != 0; boolean isStatic = (access & ACC_STATIC) != 0;
boolean isCtor = "<init>".equals(name); boolean isCtor = "<init>".equals(name);
boolean hasReceiver = (isStatic || isCtor) == false;
var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList());
var instrumentationMethod = checkMethods.get(key); var instrumentationMethod = checkMethods.get(key);
if (instrumentationMethod != null) { if (instrumentationMethod != null) {
// LOGGER.debug("Will instrument method {}", key); // System.out.println("Will instrument method " + key);
return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod); return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, isCtor, descriptor, instrumentationMethod);
} else { } else {
// LOGGER.trace("Will not instrument method {}", key); // System.out.println("Will not instrument method " + key);
} }
} }
return mv; return mv;

View file

@ -15,7 +15,6 @@ import org.elasticsearch.entitlement.instrumentation.MethodKey;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.objectweb.asm.Type; import org.objectweb.asm.Type;
import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -90,7 +89,9 @@ public class InstrumentationServiceImplTests extends ESTestCase {
void checkInstanceMethodManual(Class<?> clazz, TestTargetBaseClass that, int x, String y); void checkInstanceMethodManual(Class<?> clazz, TestTargetBaseClass that, int x, String y);
} }
public void testInstrumentationTargetLookup() throws IOException { interface TestCheckerDerived3 extends TestCheckerMixed {}
public void testInstrumentationTargetLookup() throws ClassNotFoundException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestChecker.class); Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestChecker.class);
assertThat(checkMethods, aMapWithSize(3)); assertThat(checkMethods, aMapWithSize(3));
@ -143,7 +144,7 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testInstrumentationTargetLookupWithOverloads() throws IOException { public void testInstrumentationTargetLookupWithOverloads() throws ClassNotFoundException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerOverloads.class); Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerOverloads.class);
assertThat(checkMethods, aMapWithSize(2)); assertThat(checkMethods, aMapWithSize(2));
@ -175,7 +176,7 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testInstrumentationTargetLookupWithDerivedClass() throws IOException { public void testInstrumentationTargetLookupWithDerivedClass() throws ClassNotFoundException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerDerived2.class); Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerDerived2.class);
assertThat(checkMethods, aMapWithSize(4)); assertThat(checkMethods, aMapWithSize(4));
@ -244,7 +245,7 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testInstrumentationTargetLookupWithCtors() throws IOException { public void testInstrumentationTargetLookupWithCtors() throws ClassNotFoundException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerCtors.class); Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerCtors.class);
assertThat(checkMethods, aMapWithSize(2)); assertThat(checkMethods, aMapWithSize(2));
@ -276,7 +277,7 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testInstrumentationTargetLookupWithExtraMethods() throws IOException { public void testInstrumentationTargetLookupWithExtraMethods() throws ClassNotFoundException {
Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerMixed.class); Map<MethodKey, CheckMethod> checkMethods = instrumentationService.lookupMethods(TestCheckerMixed.class);
assertThat(checkMethods, aMapWithSize(1)); assertThat(checkMethods, aMapWithSize(1));
@ -371,7 +372,7 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testLookupImplementationMethodWithInheritance() throws ClassNotFoundException, NoSuchMethodException { public void testLookupImplementationMethodWithInheritanceOnTarget() throws ClassNotFoundException, NoSuchMethodException {
var info = instrumentationService.lookupImplementationMethod( var info = instrumentationService.lookupImplementationMethod(
TestTargetBaseClass.class, TestTargetBaseClass.class,
"instanceMethod2", "instanceMethod2",
@ -409,6 +410,44 @@ public class InstrumentationServiceImplTests extends ESTestCase {
); );
} }
public void testLookupImplementationMethodWithInheritanceOnChecker() throws ClassNotFoundException, NoSuchMethodException {
var info = instrumentationService.lookupImplementationMethod(
TestTargetBaseClass.class,
"instanceMethod2",
TestTargetImplementationClass.class,
TestCheckerDerived3.class,
"checkInstanceMethodManual",
int.class,
String.class
);
assertThat(
info.targetMethod(),
equalTo(
new MethodKey(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetIntermediateClass",
"instanceMethod2",
List.of("I", "java/lang/String")
)
)
);
assertThat(
info.checkMethod(),
equalTo(
new CheckMethod(
"org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerMixed",
"checkInstanceMethodManual",
List.of(
"Ljava/lang/Class;",
"Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetBaseClass;",
"I",
"Ljava/lang/String;"
)
)
)
);
}
public void testParseCheckerMethodSignatureStaticMethod() { public void testParseCheckerMethodSignatureStaticMethod() {
var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature(
"check$org_example_TestClass$$staticMethod", "check$org_example_TestClass$$staticMethod",

View file

@ -35,6 +35,7 @@ import java.net.ServerSocket;
import java.net.Socket; import java.net.Socket;
import java.net.SocketAddress; import java.net.SocketAddress;
import java.net.SocketImplFactory; import java.net.SocketImplFactory;
import java.net.URI;
import java.net.URL; import java.net.URL;
import java.net.URLStreamHandler; import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory; import java.net.URLStreamHandlerFactory;
@ -50,16 +51,25 @@ import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel; import java.nio.channels.SocketChannel;
import java.nio.channels.spi.SelectorProvider; import java.nio.channels.spi.SelectorProvider;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.FileStore;
import java.nio.file.LinkOption; import java.nio.file.LinkOption;
import java.nio.file.OpenOption; import java.nio.file.OpenOption;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipal;
import java.nio.file.spi.FileSystemProvider; import java.nio.file.spi.FileSystemProvider;
import java.security.cert.CertStoreParameters; import java.security.cert.CertStoreParameters;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Properties; import java.util.Properties;
import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Consumer; import java.util.function.Consumer;
import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HostnameVerifier;
@ -500,6 +510,36 @@ public interface EntitlementChecker {
// //
// old io (ie File) // old io (ie File)
// Checks for mutating java.io.File operations. The method-name convention
// encodes the instrumented target: check$<fully-qualified class with '.'
// replaced by '_'>$<methodName>, and a double '$' before the method name marks
// a *static* target method (see createTempFile below). The first parameter is
// always the caller class performing the operation.
void check$java_io_File$createNewFile(Class<?> callerClass, File file);
// Static target method (File.createTempFile), hence the '$$' separator.
void check$java_io_File$$createTempFile(Class<?> callerClass, String prefix, String suffix, File directory);
void check$java_io_File$delete(Class<?> callerClass, File file);
void check$java_io_File$deleteOnExit(Class<?> callerClass, File file);
void check$java_io_File$mkdir(Class<?> callerClass, File file);
void check$java_io_File$mkdirs(Class<?> callerClass, File file);
void check$java_io_File$renameTo(Class<?> callerClass, File file, File dest);
// The two-arg overloads add the ownerOnly flag; both variants are instrumented.
void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable);
void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable, boolean ownerOnly);
void check$java_io_File$setLastModified(Class<?> callerClass, File file, long time);
void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable);
void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable, boolean ownerOnly);
void check$java_io_File$setReadOnly(Class<?> callerClass, File file);
void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable);
void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable, boolean ownerOnly);
void check$java_io_FileOutputStream$(Class<?> callerClass, File file); void check$java_io_FileOutputStream$(Class<?> callerClass, File file);
void check$java_io_FileOutputStream$(Class<?> callerClass, File file, boolean append); void check$java_io_FileOutputStream$(Class<?> callerClass, File file, boolean append);
@ -522,5 +562,117 @@ public interface EntitlementChecker {
void check$java_nio_file_Files$$setOwner(Class<?> callerClass, Path path, UserPrincipal principal); void check$java_nio_file_Files$$setOwner(Class<?> callerClass, Path path, UserPrincipal principal);
// file system providers // file system providers
void check$java_nio_file_spi_FileSystemProvider$(Class<?> callerClass);
void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, URI uri, Map<String, ?> env);
void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, Path path, Map<String, ?> env);
void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options); void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options);
void checkNewOutputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options);
void checkNewFileChannel(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Set<? extends OpenOption> options,
FileAttribute<?>... attrs
);
void checkNewAsynchronousFileChannel(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Set<? extends OpenOption> options,
ExecutorService executor,
FileAttribute<?>... attrs
);
void checkNewByteChannel(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Set<? extends OpenOption> options,
FileAttribute<?>... attrs
);
void checkNewDirectoryStream(Class<?> callerClass, FileSystemProvider that, Path dir, DirectoryStream.Filter<? super Path> filter);
void checkCreateDirectory(Class<?> callerClass, FileSystemProvider that, Path dir, FileAttribute<?>... attrs);
void checkCreateSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link, Path target, FileAttribute<?>... attrs);
void checkCreateLink(Class<?> callerClass, FileSystemProvider that, Path link, Path existing);
void checkDelete(Class<?> callerClass, FileSystemProvider that, Path path);
void checkDeleteIfExists(Class<?> callerClass, FileSystemProvider that, Path path);
void checkReadSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link);
void checkCopy(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options);
void checkMove(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options);
void checkIsSameFile(Class<?> callerClass, FileSystemProvider that, Path path, Path path2);
void checkIsHidden(Class<?> callerClass, FileSystemProvider that, Path path);
void checkGetFileStore(Class<?> callerClass, FileSystemProvider that, Path path);
void checkCheckAccess(Class<?> callerClass, FileSystemProvider that, Path path, AccessMode... modes);
void checkGetFileAttributeView(Class<?> callerClass, FileSystemProvider that, Path path, Class<?> type, LinkOption... options);
void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, Class<?> type, LinkOption... options);
void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options);
void checkReadAttributesIfExists(Class<?> callerClass, FileSystemProvider that, Path path, Class<?> type, LinkOption... options);
void checkSetAttribute(Class<?> callerClass, FileSystemProvider that, Path path, String attribute, Object value, LinkOption... options);
void checkExists(Class<?> callerClass, FileSystemProvider that, Path path, LinkOption... options);
// file store
void checkGetFileStoreAttributeView(Class<?> callerClass, FileStore that, Class<?> type);
void checkGetAttribute(Class<?> callerClass, FileStore that, String attribute);
void checkGetBlockSize(Class<?> callerClass, FileStore that);
void checkGetTotalSpace(Class<?> callerClass, FileStore that);
void checkGetUnallocatedSpace(Class<?> callerClass, FileStore that);
void checkGetUsableSpace(Class<?> callerClass, FileStore that);
void checkIsReadOnly(Class<?> callerClass, FileStore that);
void checkName(Class<?> callerClass, FileStore that);
void checkType(Class<?> callerClass, FileStore that);
////////////////////
//
// Thread management
//
void check$java_lang_Thread$start(Class<?> callerClass, Thread thread);
void check$java_lang_Thread$setDaemon(Class<?> callerClass, Thread thread, boolean on);
void check$java_lang_ThreadGroup$setDaemon(Class<?> callerClass, ThreadGroup threadGroup, boolean daemon);
void check$java_util_concurrent_ForkJoinPool$setParallelism(Class<?> callerClass, ForkJoinPool forkJoinPool, int size);
void check$java_lang_Thread$setName(Class<?> callerClass, Thread thread, String name);
void check$java_lang_Thread$setPriority(Class<?> callerClass, Thread thread, int newPriority);
void check$java_lang_Thread$setUncaughtExceptionHandler(Class<?> callerClass, Thread thread, Thread.UncaughtExceptionHandler ueh);
void check$java_lang_ThreadGroup$setMaxPriority(Class<?> callerClass, ThreadGroup threadGroup, int pri);
} }

View file

@ -14,17 +14,47 @@ import org.elasticsearch.core.SuppressForbidden;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipal;
import java.security.SecureRandom;
@SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes")
public final class EntitledActions { public final class EntitledActions {
private EntitledActions() {} private EntitledActions() {}
@SuppressForbidden(reason = "Exposes forbidden APIs for testing purposes") private static final SecureRandom random = new SecureRandom();
static void System_clearProperty(String key) {
System.clearProperty(key); private static final Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir"));
private static Path readDir() {
return testRootDir.resolve("read_dir");
}
private static Path readWriteDir() {
return testRootDir.resolve("read_write_dir");
} }
public static UserPrincipal getFileOwner(Path path) throws IOException { public static UserPrincipal getFileOwner(Path path) throws IOException {
return Files.getOwner(path); return Files.getOwner(path);
} }
// Creates the file at {@code path} on behalf of a test. NOTE(review): this
// class lives in the qa.entitled package, so the call presumably runs with
// entitlements granted — used to set up fixtures the caller itself may not be
// allowed to create; confirm against the QA plugin's policy.
public static void createFile(Path path) throws IOException {
    Files.createFile(path);
}
// Creates a uniquely-named temporary file in the read-only test directory
// and returns its path.
public static Path createTempFileForRead() throws IOException {
    String fileName = "entitlements-" + random.nextLong() + ".tmp";
    Path target = readDir().resolve(fileName);
    return Files.createFile(target);
}
// Creates a uniquely-named temporary file in the read-write test directory
// and returns its path.
public static Path createTempFileForWrite() throws IOException {
    String fileName = "entitlements-" + random.nextLong() + ".tmp";
    return Files.createFile(readWriteDir().resolve(fileName));
}
// Creates a uniquely-named temporary directory in the read-write test
// directory and returns its path.
public static Path createTempDirectoryForWrite() throws IOException {
    String dirName = "entitlements-dir-" + random.nextLong();
    Path target = readWriteDir().resolve(dirName);
    return Files.createDirectory(target);
}
// Creates a uniquely-named symbolic link in the read-only test directory
// pointing at the read-write test directory, and returns the link's path.
public static Path createTempSymbolicLink() throws IOException {
    Path link = readDir().resolve("entitlements-link-" + random.nextLong());
    Path target = readWriteDir();
    return Files.createSymbolicLink(link, target);
}
} }

View file

@ -15,7 +15,7 @@ import org.elasticsearch.logging.Logger;
import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.ExtensiblePlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import static org.elasticsearch.entitlement.qa.entitled.EntitledActions.System_clearProperty; import java.util.concurrent.atomic.AtomicBoolean;
public class EntitledPlugin extends Plugin implements ExtensiblePlugin { public class EntitledPlugin extends Plugin implements ExtensiblePlugin {
@ -28,11 +28,19 @@ public class EntitledPlugin extends Plugin implements ExtensiblePlugin {
selfTestNotEntitled(); selfTestNotEntitled();
} }
private static final String SELF_TEST_PROPERTY = "org.elasticsearch.entitlement.qa.selfTest";
private static void selfTestEntitled() { private static void selfTestEntitled() {
logger.debug("selfTestEntitled"); logger.debug("selfTestEntitled");
System_clearProperty(SELF_TEST_PROPERTY); AtomicBoolean threadRan = new AtomicBoolean(false);
try {
Thread testThread = new Thread(() -> threadRan.set(true), "testThread");
testThread.start();
testThread.join();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
if (threadRan.get() == false) {
throw new AssertionError("Self-test thread did not run");
}
} }
private static void selfTestNotEntitled() { private static void selfTestNotEntitled() {

View file

@ -23,11 +23,13 @@ import java.net.Socket;
import java.net.SocketAddress; import java.net.SocketAddress;
import java.net.SocketException; import java.net.SocketException;
import java.net.SocketImpl; import java.net.SocketImpl;
import java.net.URI;
import java.nio.channels.AsynchronousChannelGroup; import java.nio.channels.AsynchronousChannelGroup;
import java.nio.channels.AsynchronousServerSocketChannel; import java.nio.channels.AsynchronousServerSocketChannel;
import java.nio.channels.AsynchronousSocketChannel; import java.nio.channels.AsynchronousSocketChannel;
import java.nio.channels.DatagramChannel; import java.nio.channels.DatagramChannel;
import java.nio.channels.Pipe; import java.nio.channels.Pipe;
import java.nio.channels.SeekableByteChannel;
import java.nio.channels.ServerSocketChannel; import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel; import java.nio.channels.SocketChannel;
import java.nio.channels.spi.AbstractSelector; import java.nio.channels.spi.AbstractSelector;
@ -35,6 +37,18 @@ import java.nio.channels.spi.AsynchronousChannelProvider;
import java.nio.channels.spi.SelectorProvider; import java.nio.channels.spi.SelectorProvider;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.charset.spi.CharsetProvider; import java.nio.charset.spi.CharsetProvider;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.FileStore;
import java.nio.file.FileSystem;
import java.nio.file.LinkOption;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileAttributeView;
import java.nio.file.spi.FileSystemProvider;
import java.security.cert.Certificate; import java.security.cert.Certificate;
import java.text.BreakIterator; import java.text.BreakIterator;
import java.text.Collator; import java.text.Collator;
@ -51,6 +65,7 @@ import java.text.spi.NumberFormatProvider;
import java.util.Iterator; import java.util.Iterator;
import java.util.Locale; import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadFactory;
import java.util.spi.CalendarDataProvider; import java.util.spi.CalendarDataProvider;
@ -568,4 +583,97 @@ class DummyImplementations {
return null; return null;
} }
} }
// Minimal no-op FileSystemProvider: every method returns null/false/empty or
// does nothing. NOTE(review): appears to exist only so QA tests can exercise
// the FileSystemProvider *constructor* entitlement check (see
// NioFileSystemActions.createFileSystemProvider) — none of the overrides are
// expected to be called for real.
static class DummyFileSystemProvider extends FileSystemProvider {
    @Override
    public String getScheme() {
        return "";
    }
    @Override
    public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
        return null;
    }
    @Override
    public FileSystem getFileSystem(URI uri) {
        return null;
    }
    @Override
    public Path getPath(URI uri) {
        return null;
    }
    @Override
    public SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs)
        throws IOException {
        return null;
    }
    @Override
    public DirectoryStream<Path> newDirectoryStream(Path dir, DirectoryStream.Filter<? super Path> filter) throws IOException {
        return null;
    }
    @Override
    public void createDirectory(Path dir, FileAttribute<?>... attrs) throws IOException {
    }
    @Override
    public void delete(Path path) throws IOException {
    }
    @Override
    public void copy(Path source, Path target, CopyOption... options) throws IOException {
    }
    @Override
    public void move(Path source, Path target, CopyOption... options) throws IOException {
    }
    @Override
    public boolean isSameFile(Path path, Path path2) throws IOException {
        return false;
    }
    @Override
    public boolean isHidden(Path path) throws IOException {
        return false;
    }
    @Override
    public FileStore getFileStore(Path path) throws IOException {
        return null;
    }
    @Override
    public void checkAccess(Path path, AccessMode... modes) throws IOException {
    }
    @Override
    public <V extends FileAttributeView> V getFileAttributeView(Path path, Class<V> type, LinkOption... options) {
        return null;
    }
    @Override
    public <A extends BasicFileAttributes> A readAttributes(Path path, Class<A> type, LinkOption... options) throws IOException {
        return null;
    }
    @Override
    public Map<String, Object> readAttributes(Path path, String attributes, LinkOption... options) throws IOException {
        return Map.of();
    }
    @Override
    public void setAttribute(Path path, String attribute, Object value, LinkOption... options) throws IOException {
    }
}
} }

View file

@ -12,6 +12,7 @@ package org.elasticsearch.entitlement.qa.test;
import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.qa.entitled.EntitledActions; import org.elasticsearch.entitlement.qa.entitled.EntitledActions;
import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.FileOutputStream; import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
@ -27,24 +28,109 @@ import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAcce
@SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden")
class FileCheckActions { class FileCheckActions {
private static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir")); static Path testRootDir = Paths.get(System.getProperty("es.entitlements.testdir"));
private static Path readDir() { static Path readDir() {
return testRootDir.resolve("read_dir"); return testRootDir.resolve("read_dir");
} }
private static Path readWriteDir() { static Path readWriteDir() {
return testRootDir.resolve("read_write_dir"); return testRootDir.resolve("read_write_dir");
} }
private static Path readFile() { static Path readFile() {
return testRootDir.resolve("read_file"); return testRootDir.resolve("read_file");
} }
private static Path readWriteFile() { static Path readWriteFile() {
return testRootDir.resolve("read_write_file"); return testRootDir.resolve("read_write_file");
} }
// Exercises File#createNewFile in the writable test dir. The boolean return is
// ignored — NOTE(review): presumably only performing the call matters here, so
// the instrumented entitlement check fires; confirm against the test harness.
@EntitlementTest(expectedAccess = PLUGINS)
static void fileCreateNewFile() throws IOException {
    readWriteDir().resolve("new_file").toFile().createNewFile();
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileCreateTempFile() throws IOException {
File.createTempFile("prefix", "suffix", readWriteDir().toFile());
}
// Exercises File#delete. Fixture setup goes through EntitledActions so the
// file creation itself is performed by the entitled QA component, not this
// (less-entitled) caller. The delete() return value is intentionally unused.
@EntitlementTest(expectedAccess = PLUGINS)
static void fileDelete() throws IOException {
    Path toDelete = readWriteDir().resolve("to_delete");
    EntitledActions.createFile(toDelete);
    toDelete.toFile().delete();
}
// Exercises File#deleteOnExit. Note this only *registers* the file for
// deletion at JVM shutdown; no deletion happens during the test itself.
@EntitlementTest(expectedAccess = PLUGINS)
static void fileDeleteOnExit() throws IOException {
    Path toDelete = readWriteDir().resolve("to_delete_on_exit");
    EntitledActions.createFile(toDelete);
    toDelete.toFile().deleteOnExit();
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileMkdir() throws IOException {
Path mkdir = readWriteDir().resolve("mkdir");
mkdir.toFile().mkdir();
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileMkdirs() throws IOException {
Path mkdir = readWriteDir().resolve("mkdirs");
mkdir.toFile().mkdirs();
}
// Exercises File#renameTo within the writable test dir. Both source and
// destination are under readWriteDir(); the boolean result is ignored.
@EntitlementTest(expectedAccess = PLUGINS)
static void fileRenameTo() throws IOException {
    Path toRename = readWriteDir().resolve("to_rename");
    EntitledActions.createFile(toRename);
    toRename.toFile().renameTo(readWriteDir().resolve("renamed").toFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetExecutable() throws IOException {
readWriteFile().toFile().setExecutable(false);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetExecutableOwner() throws IOException {
readWriteFile().toFile().setExecutable(false, false);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetLastModified() throws IOException {
readWriteFile().toFile().setLastModified(System.currentTimeMillis());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetReadable() throws IOException {
readWriteFile().toFile().setReadable(true);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetReadableOwner() throws IOException {
readWriteFile().toFile().setReadable(true, false);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetReadOnly() throws IOException {
Path readOnly = readWriteDir().resolve("read_only");
EntitledActions.createFile(readOnly);
readOnly.toFile().setReadOnly();
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetWritable() throws IOException {
readWriteFile().toFile().setWritable(true);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void fileSetWritableOwner() throws IOException {
readWriteFile().toFile().setWritable(true, false);
}
@EntitlementTest(expectedAccess = PLUGINS) @EntitlementTest(expectedAccess = PLUGINS)
static void createScannerFile() throws FileNotFoundException { static void createScannerFile() throws FileNotFoundException {
new Scanner(readFile().toFile()); new Scanner(readFile().toFile());

View file

@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.qa.test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.FileStoreAttributeView;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY;
// QA actions exercising java.nio.file.FileStore methods; each @EntitlementTest
// annotation declares which callers are expected to be allowed to perform the
// operation (ALWAYS_DENIED vs SERVER_ONLY). The FileStore instances are
// obtained from the shared test files declared in FileCheckActions.
class FileStoreActions {

    @EntitlementTest(expectedAccess = ALWAYS_DENIED)
    static void checkGetFileStoreAttributeView() throws IOException {
        Files.getFileStore(FileCheckActions.readWriteFile()).getFileStoreAttributeView(FileStoreAttributeView.class);
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkGetAttribute() throws IOException {
        try {
            // "zfs:compression" is an arbitrary store-specific attribute; most
            // file stores won't support it, which is fine for this test.
            Files.getFileStore(FileCheckActions.readFile()).getAttribute("zfs:compression");
        } catch (UnsupportedOperationException e) {
            // It's OK if the attribute view is not available or it does not support reading the attribute
        }
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkGetBlockSize() throws IOException {
        Files.getFileStore(FileCheckActions.readWriteFile()).getBlockSize();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkGetTotalSpace() throws IOException {
        Files.getFileStore(FileCheckActions.readWriteFile()).getTotalSpace();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkGetUnallocatedSpace() throws IOException {
        Files.getFileStore(FileCheckActions.readWriteFile()).getUnallocatedSpace();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkGetUsableSpace() throws IOException {
        Files.getFileStore(FileCheckActions.readFile()).getUsableSpace();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkIsReadOnly() throws IOException {
        Files.getFileStore(FileCheckActions.readFile()).isReadOnly();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkName() throws IOException {
        Files.getFileStore(FileCheckActions.readFile()).name();
    }

    @EntitlementTest(expectedAccess = SERVER_ONLY)
    static void checkType() throws IOException {
        Files.getFileStore(FileCheckActions.readFile()).type();
    }

    // Static-only holder; not instantiable.
    private FileStoreActions() {}
}

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.qa.test;
import org.elasticsearch.core.SuppressForbidden;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.lang.Thread.currentThread;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;
/**
 * QA actions exercising thread-management operations that are instrumented by
 * the entitlement checker (see the corresponding {@code check$java_lang_Thread$*}
 * methods on {@code EntitlementChecker}). Each method performs exactly one
 * instrumented call; all expect PLUGINS-level access. Where a setter needs a
 * value, the current value is re-applied so the test has no lasting effect.
 */
@SuppressForbidden(reason = "testing entitlements")
@SuppressWarnings("unused") // used via reflection
class ManageThreadsActions {
    private ManageThreadsActions() {}

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_Thread$start() throws InterruptedException {
        AtomicBoolean threadRan = new AtomicBoolean(false);
        Thread thread = new Thread(() -> threadRan.set(true), "test");
        thread.start();
        thread.join();
        // Explicit check rather than a bare `assert`, which is a no-op unless
        // the JVM runs with -ea; this mirrors the AssertionError pattern used
        // by EntitledPlugin's self test.
        if (threadRan.get() == false) {
            throw new AssertionError("Test thread did not run");
        }
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_Thread$setDaemon() {
        // A fresh, never-started thread: setDaemon is only legal before start().
        new Thread().setDaemon(true);
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_ThreadGroup$setDaemon() {
        // Re-applies the current daemon flag, so group state is unchanged.
        currentThread().getThreadGroup().setDaemon(currentThread().getThreadGroup().isDaemon());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_util_concurrent_ForkJoinPool$setParallelism() {
        // Re-applies the common pool's current parallelism.
        ForkJoinPool.commonPool().setParallelism(ForkJoinPool.commonPool().getParallelism());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_Thread$setName() {
        currentThread().setName(currentThread().getName());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_Thread$setPriority() {
        currentThread().setPriority(currentThread().getPriority());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_Thread$setUncaughtExceptionHandler() {
        currentThread().setUncaughtExceptionHandler(currentThread().getUncaughtExceptionHandler());
    }

    @EntitlementTest(expectedAccess = PLUGINS)
    static void java_lang_ThreadGroup$setMaxPriority() {
        currentThread().getThreadGroup().setMaxPriority(currentThread().getThreadGroup().getMaxPriority());
    }
}

View file

@ -0,0 +1,230 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.qa.test;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.entitlement.qa.entitled.EntitledActions;
import java.io.IOException;
import java.net.URI;
import java.nio.file.FileSystemException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileOwnerAttributeView;
import java.util.Map;
import java.util.Set;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS;
import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.SERVER_ONLY;
class NioFileSystemActions {
// Exercises the FileSystemProvider constructor entitlement check via the no-op
// dummy implementation; the instance itself is discarded.
@EntitlementTest(expectedAccess = SERVER_ONLY)
static void createFileSystemProvider() {
    new DummyImplementations.DummyFileSystemProvider();
}
// Exercises FileSystemProvider#newFileSystem(URI, Map). ALWAYS_DENIED: the
// call is expected to be blocked before it runs, so the bogus scheme-less URI
// is never actually interpreted.
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void checkNewFileSystemFromUri() throws IOException {
    try (var fs = FileSystems.getDefault().provider().newFileSystem(URI.create("/dummy/path"), Map.of())) {}
}
// Exercises FileSystemProvider#newFileSystem(Path, Map) on the default
// provider with a deliberately invalid path.
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void checkNewFileSystemFromPath() {
    var fs = FileSystems.getDefault().provider();
    try (var newFs = fs.newFileSystem(Path.of("/dummy/path"), Map.of())) {} catch (IOException e) {
        // When entitled, we expect to throw IOException, as the path is not valid - we don't really want to create a FS
    }
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewInputStream() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var is = fs.newInputStream(FileCheckActions.readFile())) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewOutputStream() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var os = fs.newOutputStream(FileCheckActions.readWriteFile())) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewFileChannelRead() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var fc = fs.newFileChannel(FileCheckActions.readFile(), Set.of(StandardOpenOption.READ))) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewFileChannelWrite() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var fc = fs.newFileChannel(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE))) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewAsynchronousFileChannel() throws IOException {
var fs = FileSystems.getDefault().provider();
try (
var fc = fs.newAsynchronousFileChannel(
FileCheckActions.readWriteFile(),
Set.of(StandardOpenOption.WRITE),
EsExecutors.DIRECT_EXECUTOR_SERVICE
)
) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewByteChannel() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var bc = fs.newByteChannel(FileCheckActions.readWriteFile(), Set.of(StandardOpenOption.WRITE))) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkNewDirectoryStream() throws IOException {
var fs = FileSystems.getDefault().provider();
try (var bc = fs.newDirectoryStream(FileCheckActions.readDir(), entry -> false)) {}
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkCreateDirectory() throws IOException {
var fs = FileSystems.getDefault().provider();
var directory = EntitledActions.createTempDirectoryForWrite();
fs.createDirectory(directory.resolve("subdir"));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkCreateSymbolicLink() throws IOException {
    // Creating a symlink writes the link location; the target only needs to be readable.
    var provider = FileSystems.getDefault().provider();
    var parent = EntitledActions.createTempDirectoryForWrite();
    try {
        provider.createSymbolicLink(parent.resolve("link"), FileCheckActions.readFile());
    } catch (UnsupportedOperationException | FileSystemException e) {
        // OK not to implement symbolic link in the filesystem
    }
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkCreateLink() throws IOException {
    // Creating a hard link writes the link location; the existing file only needs to be readable.
    var fs = FileSystems.getDefault().provider();
    var directory = EntitledActions.createTempDirectoryForWrite();
    try {
        fs.createLink(directory.resolve("link"), FileCheckActions.readFile());
    } catch (UnsupportedOperationException | FileSystemException e) {
        // OK not to implement hard links in the filesystem
    }
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkDelete() throws IOException {
    // Deleting a file is a write operation on its path.
    var provider = FileSystems.getDefault().provider();
    var target = EntitledActions.createTempFileForWrite();
    provider.delete(target);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkDeleteIfExists() throws IOException {
    // deleteIfExists is a write operation on the path, like delete.
    var provider = FileSystems.getDefault().provider();
    var target = EntitledActions.createTempFileForWrite();
    provider.deleteIfExists(target);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkReadSymbolicLink() throws IOException {
    // Resolving a symlink target only needs read entitlement on the link.
    var provider = FileSystems.getDefault().provider();
    var symlink = EntitledActions.createTempSymbolicLink();
    provider.readSymbolicLink(symlink);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkCopy() throws IOException {
    // Copy reads the source and writes the destination.
    var provider = FileSystems.getDefault().provider();
    var destDir = EntitledActions.createTempDirectoryForWrite();
    provider.copy(FileCheckActions.readFile(), destDir.resolve("copied"));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkMove() throws IOException {
    // Move removes the source and creates the destination, so both paths must be writable.
    var provider = FileSystems.getDefault().provider();
    var destDir = EntitledActions.createTempDirectoryForWrite();
    var source = EntitledActions.createTempFileForWrite();
    provider.move(source, destDir.resolve("moved"));
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkIsSameFile() throws IOException {
    // Comparing two paths only reads file metadata.
    var provider = FileSystems.getDefault().provider();
    provider.isSameFile(FileCheckActions.readWriteFile(), FileCheckActions.readFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkIsHidden() throws IOException {
    // The hidden-attribute query only reads file metadata.
    var provider = FileSystems.getDefault().provider();
    provider.isHidden(FileCheckActions.readFile());
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkGetFileStore() throws IOException {
    // Looking up the FileStore of a readable file should be permitted for plugins.
    var fs = FileSystems.getDefault().provider();
    var file = EntitledActions.createTempFileForRead();
    fs.getFileStore(file); // result intentionally discarded; only the entitlement check matters
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkCheckAccess() throws IOException {
    // An access check (no modes given) only reads file metadata.
    var provider = FileSystems.getDefault().provider();
    provider.checkAccess(FileCheckActions.readFile());
}
@EntitlementTest(expectedAccess = ALWAYS_DENIED)
static void checkGetFileAttributeView() {
    // Obtaining an attribute view is expected to be denied regardless of entitlements.
    var provider = FileSystems.getDefault().provider();
    provider.getFileAttributeView(FileCheckActions.readFile(), FileOwnerAttributeView.class);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkReadAttributesWithClass() throws IOException {
    // Reading typed attributes only needs read entitlement.
    var provider = FileSystems.getDefault().provider();
    provider.readAttributes(FileCheckActions.readFile(), BasicFileAttributes.class);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkReadAttributesWithString() throws IOException {
    // Reading attributes by name pattern only needs read entitlement.
    var provider = FileSystems.getDefault().provider();
    provider.readAttributes(FileCheckActions.readFile(), "*");
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkReadAttributesIfExists() throws IOException {
    // readAttributesIfExists is a metadata read like readAttributes.
    var provider = FileSystems.getDefault().provider();
    provider.readAttributesIfExists(FileCheckActions.readFile(), BasicFileAttributes.class);
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkSetAttribute() throws IOException {
    // Setting an attribute is a write; not every filesystem supports dos:hidden.
    var provider = FileSystems.getDefault().provider();
    var target = EntitledActions.createTempFileForWrite();
    try {
        provider.setAttribute(target, "dos:hidden", true);
    } catch (UnsupportedOperationException | IllegalArgumentException | FileSystemException e) {
        // OK if the file does not have/does not support the attribute
    }
}
@EntitlementTest(expectedAccess = PLUGINS)
static void checkExists() {
    // An existence check only reads file metadata.
    var provider = FileSystems.getDefault().provider();
    provider.exists(FileCheckActions.readFile());
}
private NioFileSystemActions() {} // utility holder of static test actions; not instantiable
}

View file

@ -181,11 +181,17 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)), entry("runtime_load_library", forPlugins(LoadNativeLibrariesCheckActions::runtimeLoadLibrary)),
entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)), entry("system_load", forPlugins(LoadNativeLibrariesCheckActions::systemLoad)),
entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary)) entry("system_load_library", forPlugins(LoadNativeLibrariesCheckActions::systemLoadLibrary))
// MAINTENANCE NOTE: Please don't add any more entries to this map.
// Put new tests into their own "Actions" class using the @EntitlementTest annotation.
), ),
getTestEntries(FileCheckActions.class), getTestEntries(FileCheckActions.class),
getTestEntries(FileStoreActions.class),
getTestEntries(ManageThreadsActions.class),
getTestEntries(NativeActions.class),
getTestEntries(NioFileSystemActions.class),
getTestEntries(SpiActions.class), getTestEntries(SpiActions.class),
getTestEntries(SystemActions.class), getTestEntries(SystemActions.class)
getTestEntries(NativeActions.class)
) )
.flatMap(Function.identity()) .flatMap(Function.identity())
.filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion())
@ -422,7 +428,9 @@ public class RestEntitlementsCheckAction extends BaseRestHandler {
return channel -> { return channel -> {
logger.info("Calling check action [{}]", actionName); logger.info("Calling check action [{}]", actionName);
checkAction.action().run(); checkAction.action().run();
logger.debug("Check action [{}] returned", actionName);
channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Succesfully executed action [%s]", actionName))); channel.sendResponse(new RestResponse(RestStatus.OK, Strings.format("Succesfully executed action [%s]", actionName)));
}; };
} }
} }

View file

@ -29,6 +29,7 @@ public abstract class AbstractEntitlementsIT extends ESRestTestCase {
builder.value("inbound_network"); builder.value("inbound_network");
builder.value("outbound_network"); builder.value("outbound_network");
builder.value("load_native_libraries"); builder.value("load_native_libraries");
builder.value("manage_threads");
builder.value( builder.value(
Map.of( Map.of(
"write_system_properties", "write_system_properties",

View file

@ -33,7 +33,7 @@ class EntitlementsTestRule implements TestRule {
// entitlements that test methods may use, see EntitledActions // entitlements that test methods may use, see EntitledActions
private static final PolicyBuilder ENTITLED_POLICY = (builder, tempDir) -> { private static final PolicyBuilder ENTITLED_POLICY = (builder, tempDir) -> {
builder.value(Map.of("write_system_properties", Map.of("properties", List.of("org.elasticsearch.entitlement.qa.selfTest")))); builder.value("manage_threads");
builder.value( builder.value(
Map.of( Map.of(
"files", "files",
@ -74,6 +74,8 @@ class EntitlementsTestRule implements TestRule {
.systemProperty("es.entitlements.enabled", "true") .systemProperty("es.entitlements.enabled", "true")
.systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath()) .systemProperty("es.entitlements.testdir", () -> testDir.getRoot().getAbsolutePath())
.setting("xpack.security.enabled", "false") .setting("xpack.security.enabled", "false")
// Logs in libs/entitlement/qa/build/test-results/javaRestTest/TEST-org.elasticsearch.entitlement.qa.EntitlementsXXX.xml
// .setting("logger.org.elasticsearch.entitlement", "DEBUG")
.build(); .build();
ruleChain = RuleChain.outerRule(testDir).around(tempDirSetup).around(cluster); ruleChain = RuleChain.outerRule(testDir).around(tempDirSetup).around(cluster);
} }

View file

@ -15,7 +15,6 @@ import com.sun.tools.attach.AttachNotSupportedException;
import com.sun.tools.attach.VirtualMachine; import com.sun.tools.attach.VirtualMachine;
import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedConsumer;
import org.elasticsearch.core.CheckedSupplier;
import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.SuppressForbidden;
import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.entitlement.initialization.EntitlementInitialization;
import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
@ -27,7 +26,6 @@ import java.io.IOException;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.attribute.FileAttribute;
import java.util.Map; import java.util.Map;
import java.util.function.Function; import java.util.function.Function;
@ -149,11 +147,8 @@ public class EntitlementBootstrap {
*/ */
private static void selfTest() { private static void selfTest() {
ensureCannotStartProcess(ProcessBuilder::start); ensureCannotStartProcess(ProcessBuilder::start);
ensureCanCreateTempFile(EntitlementBootstrap::createTempFile);
// Try again with reflection // Try again with reflection
ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess); ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess);
ensureCanCreateTempFile(EntitlementBootstrap::reflectiveCreateTempFile);
} }
private static void ensureCannotStartProcess(CheckedConsumer<ProcessBuilder, ?> startProcess) { private static void ensureCannotStartProcess(CheckedConsumer<ProcessBuilder, ?> startProcess) {
@ -169,31 +164,6 @@ public class EntitlementBootstrap {
throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted");
} }
@SuppressForbidden(reason = "accesses jvm default tempdir as a self-test")
private static void ensureCanCreateTempFile(CheckedSupplier<Path, ?> createTempFile) {
try {
Path p = createTempFile.get();
p.toFile().deleteOnExit();
// Make an effort to clean up the file immediately; also, deleteOnExit leaves the file if the JVM exits abnormally.
try {
Files.delete(p);
} catch (IOException ignored) {
// Can be caused by virus scanner
}
} catch (NotEntitledException e) {
throw new IllegalStateException("Entitlement protection self-test was incorrectly forbidden", e);
} catch (Exception e) {
throw new IllegalStateException("Unable to perform entitlement protection self-test", e);
}
logger.debug("Success: Entitlement protection correctly permitted temp file creation");
}
@SuppressForbidden(reason = "accesses jvm default tempdir as a self-test")
private static Path createTempFile() throws Exception {
return Files.createTempFile(null, null);
}
private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception {
try { try {
var start = ProcessBuilder.class.getMethod("start"); var start = ProcessBuilder.class.getMethod("start");
@ -203,10 +173,5 @@ public class EntitlementBootstrap {
} }
} }
private static Path reflectiveCreateTempFile() throws Exception {
return (Path) Files.class.getMethod("createTempFile", String.class, String.class, FileAttribute[].class)
.invoke(null, null, null, new FileAttribute<?>[0]);
}
private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class);
} }

View file

@ -24,25 +24,42 @@ import org.elasticsearch.entitlement.runtime.policy.Scope;
import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.FileData;
import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement;
import java.lang.instrument.Instrumentation; import java.lang.instrument.Instrumentation;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.nio.channels.spi.SelectorProvider; import java.nio.channels.spi.SelectorProvider;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.FileStore;
import java.nio.file.FileSystems; import java.nio.file.FileSystems;
import java.nio.file.LinkOption;
import java.nio.file.OpenOption; import java.nio.file.OpenOption;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.spi.FileSystemProvider; import java.nio.file.spi.FileSystemProvider;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE;
/** /**
* Called by the agent during {@code agentmain} to configure the entitlement system, * Called by the agent during {@code agentmain} to configure the entitlement system,
@ -58,6 +75,11 @@ public class EntitlementInitialization {
private static ElasticsearchEntitlementChecker manager; private static ElasticsearchEntitlementChecker manager;
interface InstrumentationInfoFactory {
InstrumentationService.InstrumentationInfo of(String methodName, Class<?>... parameterTypes) throws ClassNotFoundException,
NoSuchMethodException;
}
// Note: referenced by bridge reflectively // Note: referenced by bridge reflectively
public static EntitlementChecker checker() { public static EntitlementChecker checker() {
return manager; return manager;
@ -70,25 +92,21 @@ public class EntitlementInitialization {
var latestCheckerInterface = getVersionSpecificCheckerClass(EntitlementChecker.class); var latestCheckerInterface = getVersionSpecificCheckerClass(EntitlementChecker.class);
Map<MethodKey, CheckMethod> checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(latestCheckerInterface)); Map<MethodKey, CheckMethod> checkMethods = new HashMap<>(INSTRUMENTATION_SERVICE.lookupMethods(latestCheckerInterface));
var fileSystemProviderClass = FileSystems.getDefault().provider().getClass();
Stream.of( Stream.of(
INSTRUMENTATION_SERVICE.lookupImplementationMethod( fileSystemProviderChecks(),
FileSystemProvider.class, fileStoreChecks(),
"newInputStream", Stream.of(
fileSystemProviderClass, INSTRUMENTATION_SERVICE.lookupImplementationMethod(
EntitlementChecker.class, SelectorProvider.class,
"checkNewInputStream", "inheritedChannel",
Path.class, SelectorProvider.provider().getClass(),
OpenOption[].class EntitlementChecker.class,
), "checkSelectorProviderInheritedChannel"
INSTRUMENTATION_SERVICE.lookupImplementationMethod( )
SelectorProvider.class,
"inheritedChannel",
SelectorProvider.provider().getClass(),
EntitlementChecker.class,
"checkSelectorProviderInheritedChannel"
) )
).forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod())); )
.flatMap(Function.identity())
.forEach(instrumentation -> checkMethods.put(instrumentation.targetMethod(), instrumentation.checkMethod()));
var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); var classesToTransform = checkMethods.keySet().stream().map(MethodKey::className).collect(Collectors.toSet());
@ -109,6 +127,8 @@ public class EntitlementInitialization {
private static PolicyManager createPolicyManager() { private static PolicyManager createPolicyManager() {
Map<String, Policy> pluginPolicies = EntitlementBootstrap.bootstrapArgs().pluginPolicies(); Map<String, Policy> pluginPolicies = EntitlementBootstrap.bootstrapArgs().pluginPolicies();
Path[] dataDirs = EntitlementBootstrap.bootstrapArgs().dataDirs();
Path tempDir = EntitlementBootstrap.bootstrapArgs().tempDir();
// TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it
var serverPolicy = new Policy( var serverPolicy = new Policy(
@ -120,23 +140,131 @@ public class EntitlementInitialization {
"org.elasticsearch.server", "org.elasticsearch.server",
List.of( List.of(
new ExitVMEntitlement(), new ExitVMEntitlement(),
new ReadStoreAttributesEntitlement(),
new CreateClassLoaderEntitlement(), new CreateClassLoaderEntitlement(),
new InboundNetworkEntitlement(), new InboundNetworkEntitlement(),
new OutboundNetworkEntitlement(), new OutboundNetworkEntitlement(),
new LoadNativeLibrariesEntitlement() new LoadNativeLibrariesEntitlement(),
new ManageThreadsEntitlement(),
new FilesEntitlement(
List.of(new FilesEntitlement.FileData(EntitlementBootstrap.bootstrapArgs().tempDir().toString(), READ_WRITE))
)
) )
), ),
new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())),
new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())),
new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement())), new Scope("org.apache.lucene.core", List.of(new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement())),
new Scope("org.elasticsearch.nativeaccess", List.of(new LoadNativeLibrariesEntitlement())) new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())),
new Scope(
"org.elasticsearch.nativeaccess",
List.of(
new LoadNativeLibrariesEntitlement(),
new FilesEntitlement(Arrays.stream(dataDirs).map(d -> new FileData(d.toString(), READ_WRITE)).toList())
)
)
) )
); );
// agents run without a module, so this is a special hack for the apm agent // agents run without a module, so this is a special hack for the apm agent
// this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed
List<Entitlement> agentEntitlements = List.of(new CreateClassLoaderEntitlement()); List<Entitlement> agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new ManageThreadsEntitlement());
var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver(); var resolver = EntitlementBootstrap.bootstrapArgs().pluginResolver();
return new PolicyManager(serverPolicy, agentEntitlements, pluginPolicies, resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE); return new PolicyManager(
serverPolicy,
agentEntitlements,
pluginPolicies,
resolver,
AGENTS_PACKAGE_NAME,
ENTITLEMENTS_MODULE,
tempDir
);
}
private static Stream<InstrumentationService.InstrumentationInfo> fileSystemProviderChecks() throws ClassNotFoundException,
NoSuchMethodException {
var fileSystemProviderClass = FileSystems.getDefault().provider().getClass();
var instrumentation = new InstrumentationInfoFactory() {
@Override
public InstrumentationService.InstrumentationInfo of(String methodName, Class<?>... parameterTypes)
throws ClassNotFoundException, NoSuchMethodException {
return INSTRUMENTATION_SERVICE.lookupImplementationMethod(
FileSystemProvider.class,
methodName,
fileSystemProviderClass,
EntitlementChecker.class,
"check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1),
parameterTypes
);
}
};
return Stream.of(
instrumentation.of("newFileSystem", URI.class, Map.class),
instrumentation.of("newFileSystem", Path.class, Map.class),
instrumentation.of("newInputStream", Path.class, OpenOption[].class),
instrumentation.of("newOutputStream", Path.class, OpenOption[].class),
instrumentation.of("newFileChannel", Path.class, Set.class, FileAttribute[].class),
instrumentation.of("newAsynchronousFileChannel", Path.class, Set.class, ExecutorService.class, FileAttribute[].class),
instrumentation.of("newByteChannel", Path.class, Set.class, FileAttribute[].class),
instrumentation.of("newDirectoryStream", Path.class, DirectoryStream.Filter.class),
instrumentation.of("createDirectory", Path.class, FileAttribute[].class),
instrumentation.of("createSymbolicLink", Path.class, Path.class, FileAttribute[].class),
instrumentation.of("createLink", Path.class, Path.class),
instrumentation.of("delete", Path.class),
instrumentation.of("deleteIfExists", Path.class),
instrumentation.of("readSymbolicLink", Path.class),
instrumentation.of("copy", Path.class, Path.class, CopyOption[].class),
instrumentation.of("move", Path.class, Path.class, CopyOption[].class),
instrumentation.of("isSameFile", Path.class, Path.class),
instrumentation.of("isHidden", Path.class),
instrumentation.of("getFileStore", Path.class),
instrumentation.of("checkAccess", Path.class, AccessMode[].class),
instrumentation.of("getFileAttributeView", Path.class, Class.class, LinkOption[].class),
instrumentation.of("readAttributes", Path.class, Class.class, LinkOption[].class),
instrumentation.of("readAttributes", Path.class, String.class, LinkOption[].class),
instrumentation.of("readAttributesIfExists", Path.class, Class.class, LinkOption[].class),
instrumentation.of("setAttribute", Path.class, String.class, Object.class, LinkOption[].class),
instrumentation.of("exists", Path.class, LinkOption[].class)
);
}
private static Stream<InstrumentationService.InstrumentationInfo> fileStoreChecks() {
var fileStoreClasses = StreamSupport.stream(FileSystems.getDefault().getFileStores().spliterator(), false)
.map(FileStore::getClass)
.distinct();
return fileStoreClasses.flatMap(fileStoreClass -> {
var instrumentation = new InstrumentationInfoFactory() {
@Override
public InstrumentationService.InstrumentationInfo of(String methodName, Class<?>... parameterTypes)
throws ClassNotFoundException, NoSuchMethodException {
return INSTRUMENTATION_SERVICE.lookupImplementationMethod(
FileStore.class,
methodName,
fileStoreClass,
EntitlementChecker.class,
"check" + Character.toUpperCase(methodName.charAt(0)) + methodName.substring(1),
parameterTypes
);
}
};
try {
return Stream.of(
instrumentation.of("getFileStoreAttributeView", Class.class),
instrumentation.of("getAttribute", String.class),
instrumentation.of("getBlockSize"),
instrumentation.of("getTotalSpace"),
instrumentation.of("getUnallocatedSpace"),
instrumentation.of("getUsableSpace"),
instrumentation.of("isReadOnly"),
instrumentation.of("name"),
instrumentation.of("type")
);
} catch (NoSuchMethodException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
});
} }
/** /**

View file

@ -9,7 +9,6 @@
package org.elasticsearch.entitlement.instrumentation; package org.elasticsearch.entitlement.instrumentation;
import java.io.IOException;
import java.util.Map; import java.util.Map;
/** /**
@ -23,7 +22,7 @@ public interface InstrumentationService {
Instrumenter newInstrumenter(Class<?> clazz, Map<MethodKey, CheckMethod> methods); Instrumenter newInstrumenter(Class<?> clazz, Map<MethodKey, CheckMethod> methods);
Map<MethodKey, CheckMethod> lookupMethods(Class<?> clazz) throws IOException; Map<MethodKey, CheckMethod> lookupMethods(Class<?> clazz) throws ClassNotFoundException;
InstrumentationInfo lookupImplementationMethod( InstrumentationInfo lookupImplementationMethod(
Class<?> targetSuperclass, Class<?> targetSuperclass,

View file

@ -40,6 +40,7 @@ import java.net.ServerSocket;
import java.net.Socket; import java.net.Socket;
import java.net.SocketAddress; import java.net.SocketAddress;
import java.net.SocketImplFactory; import java.net.SocketImplFactory;
import java.net.URI;
import java.net.URL; import java.net.URL;
import java.net.URLStreamHandler; import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory; import java.net.URLStreamHandlerFactory;
@ -55,16 +56,26 @@ import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel; import java.nio.channels.SocketChannel;
import java.nio.channels.spi.SelectorProvider; import java.nio.channels.spi.SelectorProvider;
import java.nio.charset.Charset; import java.nio.charset.Charset;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.FileStore;
import java.nio.file.LinkOption; import java.nio.file.LinkOption;
import java.nio.file.OpenOption; import java.nio.file.OpenOption;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.UserPrincipal; import java.nio.file.attribute.UserPrincipal;
import java.nio.file.spi.FileSystemProvider; import java.nio.file.spi.FileSystemProvider;
import java.security.cert.CertStoreParameters; import java.security.cert.CertStoreParameters;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Properties; import java.util.Properties;
import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Consumer; import java.util.function.Consumer;
import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HostnameVerifier;
@ -940,6 +951,82 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
// old io (ie File) // old io (ie File)
@Override
public void check$java_io_File$createNewFile(Class<?> callerClass, File file) {
// Creating a new file mutates the filesystem: require write entitlement on the target.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$$createTempFile(Class<?> callerClass, String prefix, String suffix, File directory) {
// The temp file is created inside `directory`, so the write check targets the directory.
policyManager.checkFileWrite(callerClass, directory);
}
@Override
public void check$java_io_File$delete(Class<?> callerClass, File file) {
// Deletion is a write operation on the file's path.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$deleteOnExit(Class<?> callerClass, File file) {
// Scheduling deletion at JVM exit will eventually delete the file, so treat it as a write now.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$mkdir(Class<?> callerClass, File file) {
// Creating a directory mutates the filesystem: require write entitlement.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$mkdirs(Class<?> callerClass, File file) {
// Only the leaf path is checked; intermediate parents created by mkdirs are covered by the same entitlement.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$renameTo(Class<?> callerClass, File file, File dest) {
// Source is checked for read, destination for write.
// NOTE(review): renameTo also removes the source entry; consider whether write entitlement
// on `file` should be required as well — TODO confirm intended semantics.
policyManager.checkFileRead(callerClass, file);
policyManager.checkFileWrite(callerClass, dest);
}
@Override
public void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable) {
// Changing permissions mutates file metadata: require write entitlement.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setExecutable(Class<?> callerClass, File file, boolean executable, boolean ownerOnly) {
// Same as the two-argument overload: a permission change is a metadata write.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setLastModified(Class<?> callerClass, File file, long time) {
// Updating the modification timestamp is a metadata write.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable) {
// Changing readability mutates file metadata: require write entitlement.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setReadable(Class<?> callerClass, File file, boolean readable, boolean ownerOnly) {
// Same as the two-argument overload: a permission change is a metadata write.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setReadOnly(Class<?> callerClass, File file) {
// Marking the file read-only is itself a metadata write.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable) {
// Changing writability mutates file metadata: require write entitlement.
policyManager.checkFileWrite(callerClass, file);
}
@Override
public void check$java_io_File$setWritable(Class<?> callerClass, File file, boolean writable, boolean ownerOnly) {
// Same as the two-argument overload: a permission change is a metadata write.
policyManager.checkFileWrite(callerClass, file);
}
@Override @Override
public void check$java_io_FileOutputStream$(Class<?> callerClass, String name) { public void check$java_io_FileOutputStream$(Class<?> callerClass, String name) {
policyManager.checkFileWrite(callerClass, new File(name)); policyManager.checkFileWrite(callerClass, new File(name));
@ -994,8 +1081,292 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker {
// file system providers // file system providers
@Override
public void check$java_nio_file_spi_FileSystemProvider$(Class<?> callerClass) {
// Installing a new FileSystemProvider affects the whole JVM, not a single file.
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, URI uri, Map<String, ?> env) {
// Mounting a new filesystem is treated as JVM-global state change, regardless of the URI.
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override
public void checkNewFileSystem(Class<?> callerClass, FileSystemProvider that, Path path, Map<String, ?> env) {
// Path-based variant of newFileSystem: also a JVM-global state change.
policyManager.checkChangeJVMGlobalState(callerClass);
}
@Override @Override
public void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options) { public void checkNewInputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options) {
// TODO: policyManger.checkFileSystemRead(path); policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkNewOutputStream(Class<?> callerClass, FileSystemProvider that, Path path, OpenOption... options) {
// An output stream always writes, so only the write entitlement is checked.
policyManager.checkFileWrite(callerClass, path);
}
/** Returns true when any of the given open options implies the file may be modified. */
private static boolean isOpenForWrite(Set<? extends OpenOption> options) {
    for (OpenOption option : options) {
        if (option == StandardOpenOption.WRITE
            || option == StandardOpenOption.APPEND
            || option == StandardOpenOption.CREATE
            || option == StandardOpenOption.CREATE_NEW
            || option == StandardOpenOption.DELETE_ON_CLOSE) {
            return true;
        }
    }
    return false;
}
@Override
public void checkNewFileChannel(
    Class<?> callerClass,
    FileSystemProvider that,
    Path path,
    Set<? extends OpenOption> options,
    FileAttribute<?>... attrs
) {
    // Write-implying open options require write entitlement; a plain read needs only read entitlement.
    if (isOpenForWrite(options) == false) {
        policyManager.checkFileRead(callerClass, path);
    } else {
        policyManager.checkFileWrite(callerClass, path);
    }
}
@Override
public void checkNewAsynchronousFileChannel(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Set<? extends OpenOption> options,
ExecutorService executor,
FileAttribute<?>... attrs
) {
if (isOpenForWrite(options)) {
policyManager.checkFileWrite(callerClass, path);
} else {
policyManager.checkFileRead(callerClass, path);
}
}
@Override
public void checkNewByteChannel(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Set<? extends OpenOption> options,
FileAttribute<?>... attrs
) {
if (isOpenForWrite(options)) {
policyManager.checkFileWrite(callerClass, path);
} else {
policyManager.checkFileRead(callerClass, path);
}
}
@Override
public void checkNewDirectoryStream(
Class<?> callerClass,
FileSystemProvider that,
Path dir,
DirectoryStream.Filter<? super Path> filter
) {
policyManager.checkFileRead(callerClass, dir);
}
@Override
public void checkCreateDirectory(Class<?> callerClass, FileSystemProvider that, Path dir, FileAttribute<?>... attrs) {
policyManager.checkFileWrite(callerClass, dir);
}
@Override
public void checkCreateSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link, Path target, FileAttribute<?>... attrs) {
policyManager.checkFileWrite(callerClass, link);
policyManager.checkFileRead(callerClass, target);
}
@Override
public void checkCreateLink(Class<?> callerClass, FileSystemProvider that, Path link, Path existing) {
policyManager.checkFileWrite(callerClass, link);
policyManager.checkFileRead(callerClass, existing);
}
@Override
public void checkDelete(Class<?> callerClass, FileSystemProvider that, Path path) {
policyManager.checkFileWrite(callerClass, path);
}
@Override
public void checkDeleteIfExists(Class<?> callerClass, FileSystemProvider that, Path path) {
policyManager.checkFileWrite(callerClass, path);
}
@Override
public void checkReadSymbolicLink(Class<?> callerClass, FileSystemProvider that, Path link) {
policyManager.checkFileRead(callerClass, link);
}
@Override
public void checkCopy(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) {
policyManager.checkFileWrite(callerClass, target);
policyManager.checkFileRead(callerClass, source);
}
@Override
public void checkMove(Class<?> callerClass, FileSystemProvider that, Path source, Path target, CopyOption... options) {
policyManager.checkFileWrite(callerClass, target);
policyManager.checkFileWrite(callerClass, source);
}
@Override
public void checkIsSameFile(Class<?> callerClass, FileSystemProvider that, Path path, Path path2) {
policyManager.checkFileRead(callerClass, path);
policyManager.checkFileRead(callerClass, path2);
}
@Override
public void checkIsHidden(Class<?> callerClass, FileSystemProvider that, Path path) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkGetFileStore(Class<?> callerClass, FileSystemProvider that, Path path) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkCheckAccess(Class<?> callerClass, FileSystemProvider that, Path path, AccessMode... modes) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkGetFileAttributeView(Class<?> callerClass, FileSystemProvider that, Path path, Class<?> type, LinkOption... options) {
policyManager.checkGetFileAttributeView(callerClass);
}
@Override
public void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, Class<?> type, LinkOption... options) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkReadAttributes(Class<?> callerClass, FileSystemProvider that, Path path, String attributes, LinkOption... options) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkReadAttributesIfExists(
Class<?> callerClass,
FileSystemProvider that,
Path path,
Class<?> type,
LinkOption... options
) {
policyManager.checkFileRead(callerClass, path);
}
@Override
public void checkSetAttribute(
Class<?> callerClass,
FileSystemProvider that,
Path path,
String attribute,
Object value,
LinkOption... options
) {
policyManager.checkFileWrite(callerClass, path);
}
@Override
public void checkExists(Class<?> callerClass, FileSystemProvider that, Path path, LinkOption... options) {
policyManager.checkFileRead(callerClass, path);
}
// Thread management
@Override
public void check$java_lang_Thread$start(Class<?> callerClass, Thread thread) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_Thread$setDaemon(Class<?> callerClass, Thread thread, boolean on) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_ThreadGroup$setDaemon(Class<?> callerClass, ThreadGroup threadGroup, boolean daemon) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_util_concurrent_ForkJoinPool$setParallelism(Class<?> callerClass, ForkJoinPool forkJoinPool, int size) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_Thread$setName(Class<?> callerClass, Thread thread, String name) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_Thread$setPriority(Class<?> callerClass, Thread thread, int newPriority) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_Thread$setUncaughtExceptionHandler(
Class<?> callerClass,
Thread thread,
Thread.UncaughtExceptionHandler ueh
) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void check$java_lang_ThreadGroup$setMaxPriority(Class<?> callerClass, ThreadGroup threadGroup, int pri) {
policyManager.checkManageThreadsEntitlement(callerClass);
}
@Override
public void checkGetFileStoreAttributeView(Class<?> callerClass, FileStore that, Class<?> type) {
policyManager.checkWriteStoreAttributes(callerClass);
}
@Override
public void checkGetAttribute(Class<?> callerClass, FileStore that, String attribute) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkGetBlockSize(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkGetTotalSpace(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkGetUnallocatedSpace(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkGetUsableSpace(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkIsReadOnly(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkName(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
}
@Override
public void checkType(Class<?> callerClass, FileStore that) {
policyManager.checkReadStoreAttributes(callerClass);
} }
} }

View file

@ -20,13 +20,12 @@ import java.util.Objects;
import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; import static org.elasticsearch.core.PathUtils.getDefaultFileSystem;
public final class FileAccessTree { public final class FileAccessTree {
public static final FileAccessTree EMPTY = new FileAccessTree(FilesEntitlement.EMPTY);
private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator(); private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator();
private final String[] readPaths; private final String[] readPaths;
private final String[] writePaths; private final String[] writePaths;
private FileAccessTree(FilesEntitlement filesEntitlement) { private FileAccessTree(FilesEntitlement filesEntitlement, Path tempDir) {
List<String> readPaths = new ArrayList<>(); List<String> readPaths = new ArrayList<>();
List<String> writePaths = new ArrayList<>(); List<String> writePaths = new ArrayList<>();
for (FilesEntitlement.FileData fileData : filesEntitlement.filesData()) { for (FilesEntitlement.FileData fileData : filesEntitlement.filesData()) {
@ -38,6 +37,10 @@ public final class FileAccessTree {
readPaths.add(path); readPaths.add(path);
} }
// everything has access to the temp dir
readPaths.add(tempDir.toString());
writePaths.add(tempDir.toString());
readPaths.sort(String::compareTo); readPaths.sort(String::compareTo);
writePaths.sort(String::compareTo); writePaths.sort(String::compareTo);
@ -45,8 +48,8 @@ public final class FileAccessTree {
this.writePaths = writePaths.toArray(new String[0]); this.writePaths = writePaths.toArray(new String[0]);
} }
public static FileAccessTree of(FilesEntitlement filesEntitlement) { public static FileAccessTree of(FilesEntitlement filesEntitlement, Path tempDir) {
return new FileAccessTree(filesEntitlement); return new FileAccessTree(filesEntitlement, tempDir);
} }
boolean canRead(Path path) { boolean canRead(Path path) {

View file

@ -19,7 +19,9 @@ import org.elasticsearch.entitlement.runtime.policy.entitlements.ExitVMEntitleme
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ReadStoreAttributesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement;
import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.LogManager;
@ -68,24 +70,6 @@ public class PolicyManager {
entitlementsByType = Map.copyOf(entitlementsByType); entitlementsByType = Map.copyOf(entitlementsByType);
} }
public static ModuleEntitlements none(String componentName) {
return new ModuleEntitlements(componentName, Map.of(), FileAccessTree.EMPTY);
}
public static ModuleEntitlements from(String componentName, List<Entitlement> entitlements) {
FilesEntitlement filesEntitlement = FilesEntitlement.EMPTY;
for (Entitlement entitlement : entitlements) {
if (entitlement instanceof FilesEntitlement) {
filesEntitlement = (FilesEntitlement) entitlement;
}
}
return new ModuleEntitlements(
componentName,
entitlements.stream().collect(groupingBy(Entitlement::getClass)),
FileAccessTree.of(filesEntitlement)
);
}
public boolean hasEntitlement(Class<? extends Entitlement> entitlementClass) { public boolean hasEntitlement(Class<? extends Entitlement> entitlementClass) {
return entitlementsByType.containsKey(entitlementClass); return entitlementsByType.containsKey(entitlementClass);
} }
@ -99,12 +83,34 @@ public class PolicyManager {
} }
} }
// pkg private for testing
ModuleEntitlements defaultEntitlements(String componentName) {
return new ModuleEntitlements(componentName, Map.of(), defaultFileAccess);
}
// pkg private for testing
ModuleEntitlements policyEntitlements(String componentName, List<Entitlement> entitlements) {
FilesEntitlement filesEntitlement = FilesEntitlement.EMPTY;
for (Entitlement entitlement : entitlements) {
if (entitlement instanceof FilesEntitlement) {
filesEntitlement = (FilesEntitlement) entitlement;
}
}
return new ModuleEntitlements(
componentName,
entitlements.stream().collect(groupingBy(Entitlement::getClass)),
FileAccessTree.of(filesEntitlement, tempDir)
);
}
final Map<Module, ModuleEntitlements> moduleEntitlementsMap = new ConcurrentHashMap<>(); final Map<Module, ModuleEntitlements> moduleEntitlementsMap = new ConcurrentHashMap<>();
protected final Map<String, List<Entitlement>> serverEntitlements; private final Map<String, List<Entitlement>> serverEntitlements;
protected final List<Entitlement> apmAgentEntitlements; private final List<Entitlement> apmAgentEntitlements;
protected final Map<String, Map<String, List<Entitlement>>> pluginsEntitlements; private final Map<String, Map<String, List<Entitlement>>> pluginsEntitlements;
private final Function<Class<?>, String> pluginResolver; private final Function<Class<?>, String> pluginResolver;
private final Path tempDir;
private final FileAccessTree defaultFileAccess;
public static final String ALL_UNNAMED = "ALL-UNNAMED"; public static final String ALL_UNNAMED = "ALL-UNNAMED";
@ -139,7 +145,8 @@ public class PolicyManager {
Map<String, Policy> pluginPolicies, Map<String, Policy> pluginPolicies,
Function<Class<?>, String> pluginResolver, Function<Class<?>, String> pluginResolver,
String apmAgentPackageName, String apmAgentPackageName,
Module entitlementsModule Module entitlementsModule,
Path tempDir
) { ) {
this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy));
this.apmAgentEntitlements = apmAgentEntitlements; this.apmAgentEntitlements = apmAgentEntitlements;
@ -149,6 +156,9 @@ public class PolicyManager {
this.pluginResolver = pluginResolver; this.pluginResolver = pluginResolver;
this.apmAgentPackageName = apmAgentPackageName; this.apmAgentPackageName = apmAgentPackageName;
this.entitlementsModule = entitlementsModule; this.entitlementsModule = entitlementsModule;
this.defaultFileAccess = FileAccessTree.of(FilesEntitlement.EMPTY, tempDir);
this.tempDir = tempDir;
for (var e : serverEntitlements.entrySet()) { for (var e : serverEntitlements.entrySet()) {
validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue()); validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue());
@ -181,6 +191,14 @@ public class PolicyManager {
neverEntitled(callerClass, () -> "start process"); neverEntitled(callerClass, () -> "start process");
} }
public void checkWriteStoreAttributes(Class<?> callerClass) {
neverEntitled(callerClass, () -> "change file store attributes");
}
public void checkReadStoreAttributes(Class<?> callerClass) {
checkEntitlementPresent(callerClass, ReadStoreAttributesEntitlement.class);
}
/** /**
* @param operationDescription is only called when the operation is not trivially allowed, meaning the check is about to fail; * @param operationDescription is only called when the operation is not trivially allowed, meaning the check is about to fail;
* therefore, its performance is not a major concern. * therefore, its performance is not a major concern.
@ -191,7 +209,7 @@ public class PolicyManager {
return; return;
} }
throw new NotEntitledException( notEntitled(
Strings.format( Strings.format(
"Not entitled: component [%s], module [%s], class [%s], operation [%s]", "Not entitled: component [%s], module [%s], class [%s], operation [%s]",
getEntitlements(requestingClass).componentName(), getEntitlements(requestingClass).componentName(),
@ -215,17 +233,19 @@ public class PolicyManager {
} }
public void checkChangeJVMGlobalState(Class<?> callerClass) { public void checkChangeJVMGlobalState(Class<?> callerClass) {
neverEntitled(callerClass, () -> { neverEntitled(callerClass, () -> walkStackForCheckMethodName().orElse("change JVM global state"));
// Look up the check$ method to compose an informative error message. }
// This way, we don't need to painstakingly describe every individual global-state change.
Optional<String> checkMethodName = StackWalker.getInstance() private Optional<String> walkStackForCheckMethodName() {
.walk( // Look up the check$ method to compose an informative error message.
frames -> frames.map(StackFrame::getMethodName) // This way, we don't need to painstakingly describe every individual global-state change.
.dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX))) return StackWalker.getInstance()
.findFirst() .walk(
); frames -> frames.map(StackFrame::getMethodName)
return checkMethodName.map(this::operationDescription).orElse("change JVM global state"); .dropWhile(not(methodName -> methodName.startsWith(InstrumentationService.CHECK_METHOD_PREFIX)))
}); .findFirst()
)
.map(this::operationDescription);
} }
/** /**
@ -248,11 +268,11 @@ public class PolicyManager {
ModuleEntitlements entitlements = getEntitlements(requestingClass); ModuleEntitlements entitlements = getEntitlements(requestingClass);
if (entitlements.fileAccess().canRead(path) == false) { if (entitlements.fileAccess().canRead(path) == false) {
throw new NotEntitledException( notEntitled(
Strings.format( Strings.format(
"Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [read], path [%s]", "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [read], path [%s]",
entitlements.componentName(), entitlements.componentName(),
requestingClass.getModule(), requestingClass.getModule().getName(),
requestingClass, requestingClass,
path path
) )
@ -273,11 +293,11 @@ public class PolicyManager {
ModuleEntitlements entitlements = getEntitlements(requestingClass); ModuleEntitlements entitlements = getEntitlements(requestingClass);
if (entitlements.fileAccess().canWrite(path) == false) { if (entitlements.fileAccess().canWrite(path) == false) {
throw new NotEntitledException( notEntitled(
Strings.format( Strings.format(
"Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [write], path [%s]", "Not entitled: component [%s], module [%s], class [%s], entitlement [file], operation [write], path [%s]",
entitlements.componentName(), entitlements.componentName(),
requestingClass.getModule(), requestingClass.getModule().getName(),
requestingClass, requestingClass,
path path
) )
@ -285,6 +305,15 @@ public class PolicyManager {
} }
} }
/**
* Invoked when we try to get an arbitrary {@code FileAttributeView} class. Such a class can modify attributes, like owner etc.;
* we could think about introducing checks for each of the operations, but for now we over-approximate this and simply deny when it is
* used directly.
*/
public void checkGetFileAttributeView(Class<?> callerClass) {
neverEntitled(callerClass, () -> "get file attribute view");
}
/** /**
* Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions * Check for operations that can access sensitive network information, e.g. secrets, tokens or SSL sessions
*/ */
@ -322,7 +351,7 @@ public class PolicyManager {
Class<?> requestingClass Class<?> requestingClass
) { ) {
if (classEntitlements.hasEntitlement(entitlementClass) == false) { if (classEntitlements.hasEntitlement(entitlementClass) == false) {
throw new NotEntitledException( notEntitled(
Strings.format( Strings.format(
"Not entitled: component [%s], module [%s], class [%s], entitlement [%s]", "Not entitled: component [%s], module [%s], class [%s], entitlement [%s]",
classEntitlements.componentName(), classEntitlements.componentName(),
@ -362,7 +391,7 @@ public class PolicyManager {
); );
return; return;
} }
throw new NotEntitledException( notEntitled(
Strings.format( Strings.format(
"Not entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]", "Not entitled: component [%s], module [%s], class [%s], entitlement [write_system_properties], property [%s]",
entitlements.componentName(), entitlements.componentName(),
@ -373,6 +402,14 @@ public class PolicyManager {
); );
} }
private static void notEntitled(String message) {
throw new NotEntitledException(message);
}
public void checkManageThreadsEntitlement(Class<?> callerClass) {
checkEntitlementPresent(callerClass, ManageThreadsEntitlement.class);
}
private void checkEntitlementPresent(Class<?> callerClass, Class<? extends Entitlement> entitlementClass) { private void checkEntitlementPresent(Class<?> callerClass, Class<? extends Entitlement> entitlementClass) {
var requestingClass = requestingClass(callerClass); var requestingClass = requestingClass(callerClass);
if (isTriviallyAllowed(requestingClass)) { if (isTriviallyAllowed(requestingClass)) {
@ -396,7 +433,7 @@ public class PolicyManager {
if (pluginName != null) { if (pluginName != null) {
var pluginEntitlements = pluginsEntitlements.get(pluginName); var pluginEntitlements = pluginsEntitlements.get(pluginName);
if (pluginEntitlements == null) { if (pluginEntitlements == null) {
return ModuleEntitlements.none(pluginName); return defaultEntitlements(pluginName);
} else { } else {
final String scopeName; final String scopeName;
if (requestingModule.isNamed() == false) { if (requestingModule.isNamed() == false) {
@ -410,10 +447,10 @@ public class PolicyManager {
if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(apmAgentPackageName)) { if (requestingModule.isNamed() == false && requestingClass.getPackageName().startsWith(apmAgentPackageName)) {
// The APM agent is the only thing running non-modular in the system classloader // The APM agent is the only thing running non-modular in the system classloader
return ModuleEntitlements.from(APM_AGENT_COMPONENT_NAME, apmAgentEntitlements); return policyEntitlements(APM_AGENT_COMPONENT_NAME, apmAgentEntitlements);
} }
return ModuleEntitlements.none(UNKNOWN_COMPONENT_NAME); return defaultEntitlements(UNKNOWN_COMPONENT_NAME);
} }
private ModuleEntitlements getModuleScopeEntitlements( private ModuleEntitlements getModuleScopeEntitlements(
@ -423,9 +460,9 @@ public class PolicyManager {
) { ) {
var entitlements = scopeEntitlements.get(moduleName); var entitlements = scopeEntitlements.get(moduleName);
if (entitlements == null) { if (entitlements == null) {
return ModuleEntitlements.none(componentName); return defaultEntitlements(componentName);
} }
return ModuleEntitlements.from(componentName, entitlements); return policyEntitlements(componentName, entitlements);
} }
private static boolean isServerModule(Module requestingModule) { private static boolean isServerModule(Module requestingModule) {

View file

@ -14,8 +14,10 @@ import org.elasticsearch.entitlement.runtime.policy.entitlements.Entitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.InboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.ManageThreadsEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.OutboundNetworkEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.SetHttpsConnectionPropertiesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteAllSystemPropertiesEntitlement;
import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement; import org.elasticsearch.entitlement.runtime.policy.entitlements.WriteSystemPropertiesEntitlement;
import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentLocation;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
@ -45,20 +47,22 @@ import java.util.stream.Stream;
*/ */
public class PolicyParser { public class PolicyParser {
private static final Map<String, Class<?>> EXTERNAL_ENTITLEMENTS = Stream.of( private static final Map<String, Class<? extends Entitlement>> EXTERNAL_ENTITLEMENTS = Stream.of(
FilesEntitlement.class,
CreateClassLoaderEntitlement.class, CreateClassLoaderEntitlement.class,
SetHttpsConnectionPropertiesEntitlement.class, FilesEntitlement.class,
OutboundNetworkEntitlement.class,
InboundNetworkEntitlement.class, InboundNetworkEntitlement.class,
WriteSystemPropertiesEntitlement.class, LoadNativeLibrariesEntitlement.class,
LoadNativeLibrariesEntitlement.class ManageThreadsEntitlement.class,
OutboundNetworkEntitlement.class,
SetHttpsConnectionPropertiesEntitlement.class,
WriteAllSystemPropertiesEntitlement.class,
WriteSystemPropertiesEntitlement.class
).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity())); ).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity()));
protected final XContentParser policyParser; protected final XContentParser policyParser;
protected final String policyName; protected final String policyName;
private final boolean isExternalPlugin; private final boolean isExternalPlugin;
private final Map<String, Class<?>> externalEntitlements; private final Map<String, Class<? extends Entitlement>> externalEntitlements;
static String getEntitlementTypeName(Class<? extends Entitlement> entitlementClass) { static String getEntitlementTypeName(Class<? extends Entitlement> entitlementClass) {
var entitlementClassName = entitlementClass.getSimpleName(); var entitlementClassName = entitlementClass.getSimpleName();
@ -81,8 +85,12 @@ public class PolicyParser {
} }
// package private for tests // package private for tests
PolicyParser(InputStream inputStream, String policyName, boolean isExternalPlugin, Map<String, Class<?>> externalEntitlements) PolicyParser(
throws IOException { InputStream inputStream,
String policyName,
boolean isExternalPlugin,
Map<String, Class<? extends Entitlement>> externalEntitlements
) throws IOException {
this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream));
this.policyName = policyName; this.policyName = policyName;
this.isExternalPlugin = isExternalPlugin; this.isExternalPlugin = isExternalPlugin;

View file

@ -0,0 +1,17 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.entitlement.runtime.policy.entitlements;
import org.elasticsearch.entitlement.runtime.policy.ExternalEntitlement;
public record ManageThreadsEntitlement() implements Entitlement {
@ExternalEntitlement(esModulesOnly = false)
public ManageThreadsEntitlement {}
}

View file

@ -7,8 +7,9 @@
* License v3.0 only", or the "Server Side Public License, v 1". * License v3.0 only", or the "Server Side Public License, v 1".
*/ */
package org.elasticsearch.common.util; package org.elasticsearch.entitlement.runtime.policy.entitlements;
public interface Countable { /**
int size(); * Describes an entitlement for reading file store attributes (e.g. disk space)
} */
public record ReadStoreAttributesEntitlement() implements Entitlement {}

View file

@ -36,13 +36,13 @@ public class FileAccessTreeTests extends ESTestCase {
} }
public void testEmpty() { public void testEmpty() {
var tree = FileAccessTree.of(FilesEntitlement.EMPTY); var tree = accessTree(FilesEntitlement.EMPTY);
assertThat(tree.canRead(path("path")), is(false)); assertThat(tree.canRead(path("path")), is(false));
assertThat(tree.canWrite(path("path")), is(false)); assertThat(tree.canWrite(path("path")), is(false));
} }
public void testRead() { public void testRead() {
var tree = FileAccessTree.of(entitlement("foo", "read")); var tree = accessTree(entitlement("foo", "read"));
assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canRead(path("foo")), is(true));
assertThat(tree.canRead(path("foo/subdir")), is(true)); assertThat(tree.canRead(path("foo/subdir")), is(true));
assertThat(tree.canRead(path("food")), is(false)); assertThat(tree.canRead(path("food")), is(false));
@ -54,7 +54,7 @@ public class FileAccessTreeTests extends ESTestCase {
} }
public void testWrite() { public void testWrite() {
var tree = FileAccessTree.of(entitlement("foo", "read_write")); var tree = accessTree(entitlement("foo", "read_write"));
assertThat(tree.canWrite(path("foo")), is(true)); assertThat(tree.canWrite(path("foo")), is(true));
assertThat(tree.canWrite(path("foo/subdir")), is(true)); assertThat(tree.canWrite(path("foo/subdir")), is(true));
assertThat(tree.canWrite(path("food")), is(false)); assertThat(tree.canWrite(path("food")), is(false));
@ -66,7 +66,7 @@ public class FileAccessTreeTests extends ESTestCase {
} }
public void testTwoPaths() { public void testTwoPaths() {
var tree = FileAccessTree.of(entitlement("foo", "read", "bar", "read")); var tree = accessTree(entitlement("foo", "read", "bar", "read"));
assertThat(tree.canRead(path("a")), is(false)); assertThat(tree.canRead(path("a")), is(false));
assertThat(tree.canRead(path("bar")), is(true)); assertThat(tree.canRead(path("bar")), is(true));
assertThat(tree.canRead(path("bar/subdir")), is(true)); assertThat(tree.canRead(path("bar/subdir")), is(true));
@ -77,7 +77,7 @@ public class FileAccessTreeTests extends ESTestCase {
} }
public void testReadWriteUnderRead() { public void testReadWriteUnderRead() {
var tree = FileAccessTree.of(entitlement("foo", "read", "foo/bar", "read_write")); var tree = accessTree(entitlement("foo", "read", "foo/bar", "read_write"));
assertThat(tree.canRead(path("foo")), is(true)); assertThat(tree.canRead(path("foo")), is(true));
assertThat(tree.canWrite(path("foo")), is(false)); assertThat(tree.canWrite(path("foo")), is(false));
assertThat(tree.canRead(path("foo/bar")), is(true)); assertThat(tree.canRead(path("foo/bar")), is(true));
@ -85,7 +85,7 @@ public class FileAccessTreeTests extends ESTestCase {
} }
public void testNormalizePath() { public void testNormalizePath() {
var tree = FileAccessTree.of(entitlement("foo/../bar", "read")); var tree = accessTree(entitlement("foo/../bar", "read"));
assertThat(tree.canRead(path("foo/../bar")), is(true)); assertThat(tree.canRead(path("foo/../bar")), is(true));
assertThat(tree.canRead(path("foo")), is(false)); assertThat(tree.canRead(path("foo")), is(false));
assertThat(tree.canRead(path("")), is(false)); assertThat(tree.canRead(path("")), is(false));
@ -93,7 +93,7 @@ public class FileAccessTreeTests extends ESTestCase {
public void testForwardSlashes() { public void testForwardSlashes() {
String sep = getDefaultFileSystem().getSeparator(); String sep = getDefaultFileSystem().getSeparator();
var tree = FileAccessTree.of(entitlement("a/b", "read", "m" + sep + "n", "read")); var tree = accessTree(entitlement("a/b", "read", "m" + sep + "n", "read"));
// Native separators work // Native separators work
assertThat(tree.canRead(path("a" + sep + "b")), is(true)); assertThat(tree.canRead(path("a" + sep + "b")), is(true));
@ -104,6 +104,18 @@ public class FileAccessTreeTests extends ESTestCase {
assertThat(tree.canRead(path("m/n")), is(true)); assertThat(tree.canRead(path("m/n")), is(true));
} }
public void testTempDirAccess() {
Path tempDir = createTempDir();
var tree = FileAccessTree.of(FilesEntitlement.EMPTY, tempDir);
assertThat(tree.canRead(tempDir), is(true));
assertThat(tree.canWrite(tempDir), is(true));
}
FileAccessTree accessTree(FilesEntitlement entitlement) {
return FileAccessTree.of(entitlement, createTempDir());
}
FilesEntitlement entitlement(String... values) { FilesEntitlement entitlement(String... values) {
List<Object> filesData = new ArrayList<>(); List<Object> filesData = new ArrayList<>();
for (int i = 0; i < values.length; i += 2) { for (int i = 0; i < values.length; i += 2) {

View file

@ -71,16 +71,21 @@ public class PolicyManagerTests extends ESTestCase {
Map.of("plugin1", createPluginPolicy("plugin.module")), Map.of("plugin1", createPluginPolicy("plugin.module")),
c -> "plugin1", c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Any class from the current module (unnamed) will do // Any class from the current module (unnamed) will do
var callerClass = this.getClass(); var callerClass = this.getClass();
var requestingModule = callerClass.getModule(); var requestingModule = callerClass.getModule();
assertEquals("No policy for the unnamed module", ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); assertEquals(
"No policy for the unnamed module",
policyManager.defaultEntitlements("plugin1"),
policyManager.getEntitlements(callerClass)
);
assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap);
} }
public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() {
@ -90,16 +95,17 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> "plugin1", c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Any class from the current module (unnamed) will do // Any class from the current module (unnamed) will do
var callerClass = this.getClass(); var callerClass = this.getClass();
var requestingModule = callerClass.getModule(); var requestingModule = callerClass.getModule();
assertEquals("No policy for this plugin", ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); assertEquals("No policy for this plugin", policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass));
assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap);
} }
public void testGetEntitlementsFailureIsCached() { public void testGetEntitlementsFailureIsCached() {
@ -109,21 +115,22 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> "plugin1", c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Any class from the current module (unnamed) will do // Any class from the current module (unnamed) will do
var callerClass = this.getClass(); var callerClass = this.getClass();
var requestingModule = callerClass.getModule(); var requestingModule = callerClass.getModule();
assertEquals(ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); assertEquals(policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass));
assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap);
// A second time // A second time
assertEquals(ModuleEntitlements.none("plugin1"), policyManager.getEntitlements(callerClass)); assertEquals(policyManager.defaultEntitlements("plugin1"), policyManager.getEntitlements(callerClass));
// Nothing new in the map // Nothing new in the map
assertEquals(Map.of(requestingModule, ModuleEntitlements.none("plugin1")), policyManager.moduleEntitlementsMap); assertEquals(Map.of(requestingModule, policyManager.defaultEntitlements("plugin1")), policyManager.moduleEntitlementsMap);
} }
public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() {
@ -133,7 +140,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
c -> "plugin2", c -> "plugin2",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Any class from the current module (unnamed) will do // Any class from the current module (unnamed) will do
@ -150,7 +158,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> null, c -> null,
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Tests do not run modular, so we cannot use a server class. // Tests do not run modular, so we cannot use a server class.
@ -162,11 +171,14 @@ public class PolicyManagerTests extends ESTestCase {
assertEquals( assertEquals(
"No policy for this module in server", "No policy for this module in server",
ModuleEntitlements.none(SERVER_COMPONENT_NAME), policyManager.defaultEntitlements(SERVER_COMPONENT_NAME),
policyManager.getEntitlements(mockServerClass) policyManager.getEntitlements(mockServerClass)
); );
assertEquals(Map.of(requestingModule, ModuleEntitlements.none(SERVER_COMPONENT_NAME)), policyManager.moduleEntitlementsMap); assertEquals(
Map.of(requestingModule, policyManager.defaultEntitlements(SERVER_COMPONENT_NAME)),
policyManager.moduleEntitlementsMap
);
} }
public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException { public void testGetEntitlementsReturnsEntitlementsForServerModule() throws ClassNotFoundException {
@ -176,7 +188,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> null, c -> null,
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Tests do not run modular, so we cannot use a server class. // Tests do not run modular, so we cannot use a server class.
@ -201,7 +214,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of("mock-plugin", createPluginPolicy("org.example.plugin")), Map.of("mock-plugin", createPluginPolicy("org.example.plugin")),
c -> "mock-plugin", c -> "mock-plugin",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
var layer = createLayerForJar(jar, "org.example.plugin"); var layer = createLayerForJar(jar, "org.example.plugin");
@ -220,7 +234,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))), Map.ofEntries(entry("plugin2", createPluginPolicy(ALL_UNNAMED))),
c -> "plugin2", c -> "plugin2",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
// Any class from the current module (unnamed) will do // Any class from the current module (unnamed) will do
@ -278,7 +293,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? null : "test", c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? null : "test",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class);
assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true));
@ -305,7 +321,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> "test", c -> "test",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
) )
); );
assertEquals( assertEquals(
@ -321,7 +338,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> "test", c -> "test",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
) )
); );
assertEquals( assertEquals(
@ -352,7 +370,8 @@ public class PolicyManagerTests extends ESTestCase {
), ),
c -> "plugin1", c -> "plugin1",
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
) )
); );
assertEquals( assertEquals(
@ -371,7 +390,8 @@ public class PolicyManagerTests extends ESTestCase {
Map.of(), Map.of(),
c -> "test", // Insist that the class is in a plugin c -> "test", // Insist that the class is in a plugin
TEST_AGENTS_PACKAGE_NAME, TEST_AGENTS_PACKAGE_NAME,
NO_ENTITLEMENTS_MODULE NO_ENTITLEMENTS_MODULE,
createTempDir()
); );
ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class); ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class);
assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false));
@ -385,7 +405,15 @@ public class PolicyManagerTests extends ESTestCase {
} }
private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) { private static PolicyManager policyManager(String agentsPackageName, Module entitlementsModule) {
return new PolicyManager(createEmptyTestServerPolicy(), List.of(), Map.of(), c -> "test", agentsPackageName, entitlementsModule); return new PolicyManager(
createEmptyTestServerPolicy(),
List.of(),
Map.of(),
c -> "test",
agentsPackageName,
entitlementsModule,
createTempDir()
);
} }
private static Policy createEmptyTestServerPolicy() { private static Policy createEmptyTestServerPolicy() {

View file

@ -20,6 +20,7 @@ import org.elasticsearch.client.WarningsHandler;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Nullable;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.FeatureFlag;
import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.local.distribution.DistributionType;
@ -170,7 +171,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private void testDatabasesLoaded() throws IOException { private void testDatabasesLoaded() throws IOException {
Request getTaskState = new Request("GET", "/_cluster/state"); Request getTaskState = new Request("GET", "/_cluster/state");
ObjectPath state = ObjectPath.createFromResponse(client().performRequest(getTaskState)); ObjectPath state = ObjectPath.createFromResponse(assertOK(client().performRequest(getTaskState)));
List<?> tasks = state.evaluate("metadata.persistent_tasks.tasks"); List<?> tasks = state.evaluate("metadata.persistent_tasks.tasks");
// Short-circuit to avoid using steams if the list is empty // Short-circuit to avoid using steams if the list is empty
@ -196,7 +197,10 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
private void testCatIndices(List<String> indexNames, @Nullable List<String> additionalIndexNames) throws IOException { private void testCatIndices(List<String> indexNames, @Nullable List<String> additionalIndexNames) throws IOException {
Request catIndices = new Request("GET", "_cat/indices/*?s=index&h=index&expand_wildcards=all"); Request catIndices = new Request("GET", "_cat/indices/*?s=index&h=index&expand_wildcards=all");
String response = EntityUtils.toString(client().performRequest(catIndices).getEntity()); // the cat APIs can sometimes 404, erroneously
// see https://github.com/elastic/elasticsearch/issues/104371
setIgnoredErrorResponseCodes(catIndices, RestStatus.NOT_FOUND);
String response = EntityUtils.toString(assertOK(client().performRequest(catIndices)).getEntity());
List<String> indices = List.of(response.trim().split("\\s+")); List<String> indices = List.of(response.trim().split("\\s+"));
if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) {
@ -215,7 +219,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
assertOK(client().performRequest(putDoc)); assertOK(client().performRequest(putDoc));
Request getDoc = new Request("GET", "/my-index-00001/_doc/my_id"); Request getDoc = new Request("GET", "/my-index-00001/_doc/my_id");
ObjectPath doc = ObjectPath.createFromResponse(client().performRequest(getDoc)); ObjectPath doc = ObjectPath.createFromResponse(assertOK(client().performRequest(getDoc)));
assertNull(doc.evaluate("_source.tags")); assertNull(doc.evaluate("_source.tags"));
assertEquals("Sweden", doc.evaluate("_source.geo.country_name")); assertEquals("Sweden", doc.evaluate("_source.geo.country_name"));
} }
@ -225,8 +229,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
getStar.setOptions( getStar.setOptions(
RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors
); );
Response response = client().performRequest(getStar); Response response = assertOK(client().performRequest(getStar));
assertOK(response);
if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) {
indexNames = new ArrayList<>(indexNames); // recopy into a mutable list indexNames = new ArrayList<>(indexNames); // recopy into a mutable list
@ -244,8 +247,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
.addHeader("X-elastic-product-origin", "kibana") .addHeader("X-elastic-product-origin", "kibana")
.setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors .setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors
); );
Response response = client().performRequest(getStar); Response response = assertOK(client().performRequest(getStar));
assertOK(response);
if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) { if (additionalIndexNames != null && additionalIndexNames.isEmpty() == false) {
indexNames = new ArrayList<>(indexNames); // recopy into a mutable list indexNames = new ArrayList<>(indexNames); // recopy into a mutable list

View file

@ -423,7 +423,7 @@ public class MustacheScriptEngineTests extends ESTestCase {
ex.getCause().getCause(), ex.getCause().getCause(),
allOf( allOf(
instanceOf(SizeLimitingStringWriter.SizeLimitExceededException.class), instanceOf(SizeLimitingStringWriter.SizeLimitExceededException.class),
transformedMatch(Throwable::getMessage, endsWith("has exceeded the size limit [1024]")) transformedMatch(Throwable::getMessage, endsWith("has size [1030] which exceeds the size limit [1024]"))
) )
); );
} }

View file

@ -86,7 +86,8 @@ public class TokenCountFieldMapper extends FieldMapper {
store.getValue(), store.getValue(),
hasDocValues.getValue(), hasDocValues.getValue(),
nullValue.getValue(), nullValue.getValue(),
meta.getValue() meta.getValue(),
context.isSourceSynthetic()
); );
return new TokenCountFieldMapper(leafName(), ft, builderParams(this, context), this); return new TokenCountFieldMapper(leafName(), ft, builderParams(this, context), this);
} }
@ -100,7 +101,8 @@ public class TokenCountFieldMapper extends FieldMapper {
boolean isStored, boolean isStored,
boolean hasDocValues, boolean hasDocValues,
Number nullValue, Number nullValue,
Map<String, String> meta Map<String, String> meta,
boolean isSyntheticSource
) { ) {
super( super(
name, name,
@ -114,7 +116,8 @@ public class TokenCountFieldMapper extends FieldMapper {
null, null,
false, false,
null, null,
null null,
isSyntheticSource
); );
} }

View file

@ -277,7 +277,6 @@ public class FeatureMigrationIT extends AbstractFeatureMigrationIntegTest {
}); });
} }
@AwaitsFix(bugUrl = "ES-10666") // This test uncovered an existing issue
public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception { public void testIndexBlockIsRemovedWhenAliasRequestFails() throws Exception {
createSystemIndexForDescriptor(INTERNAL_UNMANAGED); createSystemIndexForDescriptor(INTERNAL_UNMANAGED);
ensureGreen(); ensureGreen();

View file

@ -1,2 +1,3 @@
ALL-UNNAMED: ALL-UNNAMED:
- manage_threads
- outbound_network - outbound_network

View file

@ -1,2 +1,3 @@
io.netty.common: io.netty.common:
- outbound_network - outbound_network
- manage_threads

View file

@ -1,2 +1,3 @@
ALL-UNNAMED: ALL-UNNAMED:
- manage_threads
- outbound_network - outbound_network

View file

@ -1,6 +1,8 @@
io.netty.transport: io.netty.transport:
- inbound_network - inbound_network
- outbound_network - outbound_network
- manage_threads
io.netty.common: io.netty.common:
- inbound_network - inbound_network
- outbound_network - outbound_network
- manage_threads

View file

@ -254,9 +254,6 @@ tests:
- class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT - class: org.elasticsearch.xpack.security.FileSettingsRoleMappingsRestartIT
method: testFileSettingsReprocessedOnRestartWithoutVersionChange method: testFileSettingsReprocessedOnRestartWithoutVersionChange
issue: https://github.com/elastic/elasticsearch/issues/120964 issue: https://github.com/elastic/elasticsearch/issues/120964
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsKeyword
issue: https://github.com/elastic/elasticsearch/issues/120071
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists method: testGetUsersWithProfileUidWhenProfileIndexDoesNotExists
issue: https://github.com/elastic/elasticsearch/issues/121179 issue: https://github.com/elastic/elasticsearch/issues/121179
@ -265,9 +262,6 @@ tests:
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSetEnabled method: testSetEnabled
issue: https://github.com/elastic/elasticsearch/issues/121183 issue: https://github.com/elastic/elasticsearch/issues/121183
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testWithDatastreams
issue: https://github.com/elastic/elasticsearch/issues/121236
- class: org.elasticsearch.xpack.test.rest.XPackRestIT - class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=transform/*} method: test {p0=transform/*}
issue: https://github.com/elastic/elasticsearch/issues/120816 issue: https://github.com/elastic/elasticsearch/issues/120816
@ -297,9 +291,6 @@ tests:
- class: org.elasticsearch.env.NodeEnvironmentTests - class: org.elasticsearch.env.NodeEnvironmentTests
method: testGetBestDowngradeVersion method: testGetBestDowngradeVersion
issue: https://github.com/elastic/elasticsearch/issues/121316 issue: https://github.com/elastic/elasticsearch/issues/121316
- class: org.elasticsearch.index.engine.ShuffleForcedMergePolicyTests
method: testDiagnostics
issue: https://github.com/elastic/elasticsearch/issues/121336
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/rest-api/security/invalidate-tokens/line_194} method: test {yaml=reference/rest-api/security/invalidate-tokens/line_194}
issue: https://github.com/elastic/elasticsearch/issues/121337 issue: https://github.com/elastic/elasticsearch/issues/121337
@ -317,9 +308,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/121151 issue: https://github.com/elastic/elasticsearch/issues/121151
- class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT - class: org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/121407 issue: https://github.com/elastic/elasticsearch/issues/121407
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testDependentVariableIsAliasToNested
issue: https://github.com/elastic/elasticsearch/issues/121415
- class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests - class: org.elasticsearch.xpack.security.authc.jwt.JwtRealmSingleNodeTests
method: testClientSecretRotation method: testClientSecretRotation
issue: https://github.com/elastic/elasticsearch/issues/120985 issue: https://github.com/elastic/elasticsearch/issues/120985
@ -329,30 +317,12 @@ tests:
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testGetUsersWithProfileUid method: testGetUsersWithProfileUid
issue: https://github.com/elastic/elasticsearch/issues/121483 issue: https://github.com/elastic/elasticsearch/issues/121483
- class: org.elasticsearch.xpack.transform.checkpoint.TransformCCSCanMatchIT
method: testTransformLifecycle_RangeQueryThatMatchesNoShards
issue: https://github.com/elastic/elasticsearch/issues/121480
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSuggestProfilesWithHint method: testSuggestProfilesWithHint
issue: https://github.com/elastic/elasticsearch/issues/121116 issue: https://github.com/elastic/elasticsearch/issues/121116
- class: org.elasticsearch.xpack.security.profile.ProfileIntegTests - class: org.elasticsearch.xpack.security.profile.ProfileIntegTests
method: testSuggestProfileWithData method: testSuggestProfileWithData
issue: https://github.com/elastic/elasticsearch/issues/121258 issue: https://github.com/elastic/elasticsearch/issues/121258
- class: org.elasticsearch.ingest.geoip.FullClusterRestartIT
method: testGeoIpSystemFeaturesMigration {cluster=UPGRADED}
issue: https://github.com/elastic/elasticsearch/issues/121115
- class: org.elasticsearch.xpack.core.ilm.SetSingleNodeAllocateStepTests
method: testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs
issue: https://github.com/elastic/elasticsearch/issues/121495
- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT
method: test {p0=search.vectors/42_knn_search_int4_flat/Vector similarity with filter only}
issue: https://github.com/elastic/elasticsearch/issues/121412
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testDependentVariableIsAliasToKeyword
issue: https://github.com/elastic/elasticsearch/issues/121492
- class: org.elasticsearch.search.CrossClusterSearchUnavailableClusterIT
method: testSearchSkipUnavailable
issue: https://github.com/elastic/elasticsearch/issues/121497
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/cat/health/cat-health-no-timestamp-example} method: test {yaml=reference/cat/health/cat-health-no-timestamp-example}
issue: https://github.com/elastic/elasticsearch/issues/121867 issue: https://github.com/elastic/elasticsearch/issues/121867
@ -361,9 +331,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/121625 issue: https://github.com/elastic/elasticsearch/issues/121625
- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT
issue: https://github.com/elastic/elasticsearch/issues/121967 issue: https://github.com/elastic/elasticsearch/issues/121967
- class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests
method: testBottomFieldSort
issue: https://github.com/elastic/elasticsearch/issues/121503
- class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT - class: org.elasticsearch.xpack.application.CohereServiceUpgradeIT
issue: https://github.com/elastic/elasticsearch/issues/121537 issue: https://github.com/elastic/elasticsearch/issues/121537
- class: org.elasticsearch.xpack.restart.FullClusterRestartIT - class: org.elasticsearch.xpack.restart.FullClusterRestartIT
@ -372,30 +339,9 @@ tests:
- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT
method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously} method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously}
issue: https://github.com/elastic/elasticsearch/issues/122102 issue: https://github.com/elastic/elasticsearch/issues/122102
- class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT
method: test {yaml=search/180_locale_dependent_mapping/Test Index and Search locale dependent mappings / dates}
issue: https://github.com/elastic/elasticsearch/issues/122103
- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously}
issue: https://github.com/elastic/elasticsearch/issues/122104
- class: org.elasticsearch.xpack.ml.integration.ClassificationIT
method: testWithOnlyTrainingRowsAndTrainingPercentIsFifty_DependentVariableIsBoolean
issue: https://github.com/elastic/elasticsearch/issues/121680
- class: org.elasticsearch.xpack.downsample.DownsampleActionSingleNodeTests
method: testDuplicateDownsampleRequest
issue: https://github.com/elastic/elasticsearch/issues/122158
- class: org.elasticsearch.search.SearchCancellationIT - class: org.elasticsearch.search.SearchCancellationIT
method: testCancelFailedSearchWhenPartialResultDisallowed method: testCancelFailedSearchWhenPartialResultDisallowed
issue: https://github.com/elastic/elasticsearch/issues/121719 issue: https://github.com/elastic/elasticsearch/issues/121719
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testChangePoint
issue: https://github.com/elastic/elasticsearch/issues/122179
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testChangePoint_keySortable
issue: https://github.com/elastic/elasticsearch/issues/122180
- class: org.elasticsearch.xpack.esql.analysis.VerifierTests
method: testChangePoint_valueNumeric
issue: https://github.com/elastic/elasticsearch/issues/122181
- class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT - class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT
issue: https://github.com/elastic/elasticsearch/issues/121716 issue: https://github.com/elastic/elasticsearch/issues/121716
- class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT - class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT
@ -407,17 +353,41 @@ tests:
- class: org.elasticsearch.xpack.security.authz.IndexAliasesTests - class: org.elasticsearch.xpack.security.authz.IndexAliasesTests
method: testRemoveIndex method: testRemoveIndex
issue: https://github.com/elastic/elasticsearch/issues/122221 issue: https://github.com/elastic/elasticsearch/issues/122221
- class: org.elasticsearch.xpack.migrate.action.ReindexDatastreamIndexTransportActionIT
issue: https://github.com/elastic/elasticsearch/issues/121737
- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT
method: testGroupingMultiValueByOrdinals
issue: https://github.com/elastic/elasticsearch/issues/122228
- class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT
method: testFailureLoadingFields
issue: https://github.com/elastic/elasticsearch/issues/122132
- class: org.elasticsearch.blocks.SimpleBlocksIT - class: org.elasticsearch.blocks.SimpleBlocksIT
method: testConcurrentAddBlock method: testConcurrentAddBlock
issue: https://github.com/elastic/elasticsearch/issues/122324 issue: https://github.com/elastic/elasticsearch/issues/122324
- class: org.elasticsearch.xpack.searchablesnapshots.hdfs.HdfsSearchableSnapshotsIT
issue: https://github.com/elastic/elasticsearch/issues/122024
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/cat/health/cat-health-example}
issue: https://github.com/elastic/elasticsearch/issues/122335
- class: org.elasticsearch.xpack.esql.action.CrossClusterCancellationIT
method: testCloseSkipUnavailable
issue: https://github.com/elastic/elasticsearch/issues/122336
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/alias/line_260}
issue: https://github.com/elastic/elasticsearch/issues/122343
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/snapshot-restore/apis/get-snapshot-api/line_488}
issue: https://github.com/elastic/elasticsearch/issues/121611
- class: org.elasticsearch.repositories.blobstore.testkit.analyze.SecureHdfsRepositoryAnalysisRestIT
issue: https://github.com/elastic/elasticsearch/issues/122377
- class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT
issue: https://github.com/elastic/elasticsearch/issues/122378
- class: org.elasticsearch.telemetry.apm.ApmAgentSettingsIT
issue: https://github.com/elastic/elasticsearch/issues/122546
- class: org.elasticsearch.search.SearchTimeoutIT
method: testSuggestTimeoutWithPartialResults
issue: https://github.com/elastic/elasticsearch/issues/122548
- class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests
method: testSnapshotRecovery {p0=false p1=false}
issue: https://github.com/elastic/elasticsearch/issues/122549
- class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests
method: testSnapshotRecovery {p0=true p1=false}
issue: https://github.com/elastic/elasticsearch/issues/122550
- class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests
method: testSnapshotRecovery {p0=false p1=true}
issue: https://github.com/elastic/elasticsearch/issues/122551
# Examples: # Examples:
# #

View file

@ -1,2 +1,3 @@
ALL-UNNAMED: ALL-UNNAMED:
- manage_threads
- outbound_network - outbound_network

View file

@ -50,7 +50,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
private static class SizeFieldType extends NumberFieldType { private static class SizeFieldType extends NumberFieldType {
SizeFieldType() { SizeFieldType() {
super(NAME, NumberType.INTEGER, true, true, true, false, null, Collections.emptyMap(), null, false, null, null); super(NAME, NumberType.INTEGER, true, true, true, false, null, Collections.emptyMap(), null, false, null, null, false);
} }
@Override @Override

View file

@ -1,5 +1,7 @@
ALL-UNNAMED: ALL-UNNAMED:
- manage_threads
- outbound_network - outbound_network
- load_native_libraries
- write_system_properties: - write_system_properties:
properties: properties:
- hadoop.home.dir - hadoop.home.dir

View file

@ -31,6 +31,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction;
import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.search.SearchFeatures;
import org.elasticsearch.test.NotEqualMessageBuilder; import org.elasticsearch.test.NotEqualMessageBuilder;
@ -628,13 +629,14 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
) )
); );
// assertBusy to work around https://github.com/elastic/elasticsearch/issues/104371 assertBusy(() -> {
assertBusy( Request catIndices = new Request("GET", "/_cat/indices?v&error_trace");
() -> assertThat( // the cat APIs can sometimes 404, erroneously
EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v&error_trace")).getEntity()), // see https://github.com/elastic/elasticsearch/issues/104371
containsString("testrollover-000002") setIgnoredErrorResponseCodes(catIndices, RestStatus.NOT_FOUND);
) Response response = assertOK(client().performRequest(catIndices));
); assertThat(EntityUtils.toString(response.getEntity()), containsString("testrollover-000002"));
});
} }
Request countRequest = new Request("POST", "/" + index + "-*/_search"); Request countRequest = new Request("POST", "/" + index + "-*/_search");

View file

@ -82,4 +82,5 @@ tasks.named("yamlRestCompatTestTransform").configure ({ task ->
"cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning", "cluster.desired_nodes/10_basic/Test update desired nodes with node_version generates a warning",
"node_version warning is removed in 9.0" "node_version warning is removed in 9.0"
) )
task.skipTest("tsdb/20_mapping/nested fields", "nested field support in tsdb indices is now supported")
}) })

View file

@ -2008,3 +2008,143 @@ create index with use_synthetic_source:
flush: false flush: false
- gt: { test.store_size_in_bytes: 0 } - gt: { test.store_size_in_bytes: 0 }
- is_false: test.fields._recovery_source - is_false: test.fields._recovery_source
---
"Nested synthetic source with indexed dense vectors":
- requires:
test_runner_features: [ capabilities ]
capabilities:
- method: PUT
path: /{index}
capabilities: [ synthetic_nested_dense_vector_bug_fix ]
reason: "Requires synthetic source bugfix for dense vectors in nested objects"
- do:
indices.create:
index: nested_dense_vector_synthetic_test
body:
mappings:
properties:
parent:
type: nested
properties:
vector:
type: dense_vector
index: true
similarity: l2_norm
text:
type: text
settings:
index:
mapping:
source:
mode: synthetic
- do:
index:
index: nested_dense_vector_synthetic_test
id: 0
refresh: true
body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
index:
index: nested_dense_vector_synthetic_test
id: 1
refresh: true
body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
index:
index: nested_dense_vector_synthetic_test
id: 2
refresh: true
body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
search:
index: nested_dense_vector_synthetic_test
body:
query:
match_all: {}
- match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] }
- match: { hits.hits.0._source.parent.0.text: "foo" }
- match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.0._source.parent.1.text: "bar" }
- is_false: hits.hits.1._source.parent.0.vector
- match: { hits.hits.1._source.parent.0.text: "foo" }
- match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.1._source.parent.1.text: "bar" }
      - match: { hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] }
- is_false: hits.hits.2._source.parent.0.text
- match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.2._source.parent.1.text: "bar" }
---
"Nested synthetic source with un-indexed dense vectors":
- requires:
test_runner_features: [ capabilities ]
capabilities:
- method: PUT
path: /{index}
capabilities: [ synthetic_nested_dense_vector_bug_fix ]
reason: "Requires synthetic source bugfix for dense vectors in nested objects"
- do:
indices.create:
index: nested_dense_vector_synthetic_test
body:
mappings:
properties:
parent:
type: nested
properties:
vector:
type: dense_vector
index: false
text:
type: text
settings:
index:
mapping:
source:
mode: synthetic
- do:
index:
index: nested_dense_vector_synthetic_test
id: 0
refresh: true
body: { "parent": [ { "vector": [ 1, 2 ],"text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
index:
index: nested_dense_vector_synthetic_test
id: 1
refresh: true
body: { "parent": [ { "text": "foo" }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
index:
index: nested_dense_vector_synthetic_test
id: 2
refresh: true
body: { "parent": [ { "vector": [ 1, 2 ] }, { "vector": [ 2, 2 ], "text": "bar" } ] }
- do:
search:
index: nested_dense_vector_synthetic_test
body:
query:
match_all: {}
- match: { hits.hits.0._source.parent.0.vector: [ 1.0, 2.0 ] }
- match: { hits.hits.0._source.parent.0.text: "foo" }
- match: { hits.hits.0._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.0._source.parent.1.text: "bar" }
- is_false: hits.hits.1._source.parent.0.vector
- match: { hits.hits.1._source.parent.0.text: "foo" }
- match: { hits.hits.1._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.1._source.parent.1.text: "bar" }
      - match: { hits.hits.2._source.parent.0.vector: [ 1.0, 2.0 ] }
- is_false: hits.hits.2._source.parent.0.text
- match: { hits.hits.2._source.parent.1.vector: [ 2.0, 2.0 ] }
- match: { hits.hits.2._source.parent.1.text: "bar" }

View file

@ -0,0 +1,233 @@
setup:
- requires:
cluster_features: ["mapper.tsdb_nested_field_support"]
reason: "tsdb index with nested field support enabled"
---
"Create TSDB index with field of nested type":
- do:
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 1
number_of_shards: 1
routing_path: [department]
time_series:
start_time: 2021-04-28T00:00:00Z
end_time: 2021-04-29T00:00:00Z
mappings:
properties:
"@timestamp":
type: date
department:
type: keyword
time_series_dimension: true
staff:
type: integer
courses:
type: nested
properties:
name:
type: keyword
credits:
type: integer
- do:
index:
index: test
body: { "@timestamp": "2021-04-28T01:00:00Z", "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3 }, { "name": "Theory of Computation", "credits": 4 } ] }
- do:
index:
index: test
body: { "@timestamp": "2021-04-28T02:00:00Z", "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1 }, { "name": "Linear Algebra", "credits": 3 } ] }
- do:
indices.refresh:
index: [ test ]
- do:
search:
index: test
body:
size: 0
query:
nested:
path: "courses"
query:
bool:
must:
- term:
courses.name: Precalculus
- term:
courses.credits: 3
- match: { hits.total.value: 0 }
- do:
search:
index: test
body:
query:
nested:
path: "courses"
query:
bool:
must:
- term:
courses.name: "Object Oriented Programming"
- term:
courses.credits: 3
- match: { hits.total.value: 1 }
- match: { "hits.hits.0._source.@timestamp": "2021-04-28T01:00:00.000Z" }
- match: { hits.hits.0._source.department: "compsci" }
      - match: { hits.hits.0._source.courses: [ { "name": "Object Oriented Programming", "credits": 3 }, { "name": "Theory of Computation", "credits": 4 } ] }
---
"TSDB index with multi-level nested fields":
- do:
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 1
number_of_shards: 1
routing_path: [department]
time_series:
start_time: 2021-04-28T00:00:00Z
end_time: 2021-04-29T00:00:00Z
mappings:
properties:
"@timestamp":
type: date
department:
type: keyword
time_series_dimension: true
staff:
type: integer
courses:
type: nested
properties:
name:
type: keyword
credits:
type: integer
students:
type: nested
properties:
name:
type: text
major:
type: keyword
- do:
index:
index: test
body:
"@timestamp": "2021-04-28T01:00:00Z"
department: "compsci"
staff: 12
courses:
- name: "Object Oriented Programming"
credits: 3
students:
- name: "Kimora Tanner"
major: "Computer Science"
- name: "Bruno Garrett"
major: "Software Engineering"
- name: "Theory of Computation"
credits: 4
students:
- name: "Elliott Booker"
major: "Computer Engineering"
- name: "Kimora Tanner"
major: "Software Engineering"
- do:
index:
index: test
body:
"@timestamp": "2021-04-28T02:00:00Z"
department: "math"
staff: 20
courses:
- name: "Precalculus"
credits: 4
students:
- name: "Elliott Ayers"
major: "Software Engineering"
- name: "Sylvie Howe"
major: "Computer Engineering"
- name: "Linear Algebra"
credits: 3
students:
- name: "Kimora Tanner"
major: "Computer Science"
- name: "Bruno Garett"
major: "Software Engineering"
- name: "Amelia Booker"
major: "Psychology"
- do:
index:
index: test
body:
"@timestamp": "2021-04-28T03:00:00Z"
department: "compsci"
staff: 12
courses:
- name: "Object Oriented Programming"
credits: 3
students:
- name: "Kimora Tanner"
major: "Computer Science"
- name: "Bruno Garrett"
major: "Software Engineering"
- name: "Elliott Booker"
major: "Computer Engineering"
- name: "Theory of Computation"
credits: 4
students:
- name: "Kimora Tanner"
major: "Software Engineering"
- name: "Elliott Ayers"
major: "Software Engineering"
- name: "Apollo Pittman"
major: "Computer Engineering"
- do:
indices.refresh:
index: [ test ]
- do:
search:
index: test
body:
query:
nested:
path: "courses"
query:
bool:
must:
- nested:
path: "courses.students"
query:
bool:
must:
- match:
courses.students.name: "Elliott"
- term:
courses.students.major: "Computer Engineering"
- term:
courses.name: "Theory of Computation"
- match: { hits.total.value: 1 }
- match: { hits.hits.0._source.department: "compsci" }
- match: { "hits.hits.0._source.@timestamp": "2021-04-28T01:00:00.000Z" }

View file

@ -344,37 +344,6 @@ nested dimensions:
type: keyword type: keyword
time_series_dimension: true time_series_dimension: true
---
nested fields:
- requires:
cluster_features: ["gte_v8.2.0"]
reason: message changed in 8.2.0
- do:
catch: /cannot have nested fields when index is in \[index.mode=time_series\]/
indices.create:
index: test
body:
settings:
index:
mode: time_series
routing_path: [dim]
time_series:
start_time: 2021-04-28T00:00:00Z
end_time: 2021-04-29T00:00:00Z
mappings:
properties:
"@timestamp":
type: date
dim:
type: keyword
time_series_dimension: true
nested:
type: nested
properties:
foo:
type: keyword
--- ---
"Unable to define a metric type for a runtime field": "Unable to define a metric type for a runtime field":
- requires: - requires:

View file

@ -77,7 +77,9 @@ public class PersistentTaskCreationFailureIT extends ESIntegTestCase {
.pendingTasks() .pendingTasks()
.stream() .stream()
.filter( .filter(
pendingClusterTask -> pendingClusterTask.getSource().string().equals("finish persistent task (failed)") pendingClusterTask -> pendingClusterTask.getSource()
.string()
.matches("finish persistent task \\[.*] \\(failed\\)")
) )
.count(); .count();
assertThat(completePersistentTaskPendingTasksCount, lessThanOrEqualTo(1L)); assertThat(completePersistentTaskPendingTasksCount, lessThanOrEqualTo(1L));

View file

@ -14,6 +14,7 @@ import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
@ -22,8 +23,10 @@ import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersion;
import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder;
@ -33,12 +36,23 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.search.rescore.Rescorer;
import org.elasticsearch.search.rescore.RescorerBuilder;
import org.elasticsearch.search.suggest.SortBy;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.SuggestionSearchContext;
import org.elasticsearch.search.suggest.term.TermSuggestion;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
@ -58,7 +72,7 @@ public class SearchTimeoutIT extends ESIntegTestCase {
@Override @Override
protected Collection<Class<? extends Plugin>> nodePlugins() { protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(BulkScorerTimeoutQueryPlugin.class); return Collections.singleton(SearchTimeoutPlugin.class);
} }
@Override @Override
@ -72,6 +86,9 @@ public class SearchTimeoutIT extends ESIntegTestCase {
indexRandom(true, "test", randomIntBetween(20, 50)); indexRandom(true, "test", randomIntBetween(20, 50));
} }
/**
* Test the scenario where the query times out before starting to collect documents, verify that partial hits are not returned
*/
public void testTopHitsTimeoutBeforeCollecting() { public void testTopHitsTimeoutBeforeCollecting() {
// setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS))
@ -88,6 +105,9 @@ public class SearchTimeoutIT extends ESIntegTestCase {
}); });
} }
/**
* Test the scenario where the query times out while collecting documents, verify that partial hits results are returned
*/
public void testTopHitsTimeoutWhileCollecting() { public void testTopHitsTimeoutWhileCollecting() {
// setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS)) SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.SECONDS))
@ -103,6 +123,9 @@ public class SearchTimeoutIT extends ESIntegTestCase {
}); });
} }
/**
* Test the scenario where the query times out before starting to collect documents, verify that partial aggs results are not returned
*/
public void testAggsTimeoutBeforeCollecting() { public void testAggsTimeoutBeforeCollecting() {
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0)
// setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set
@ -123,6 +146,9 @@ public class SearchTimeoutIT extends ESIntegTestCase {
}); });
} }
/**
* Test the scenario where the query times out while collecting documents, verify that partial aggs results are returned
*/
public void testAggsTimeoutWhileCollecting() { public void testAggsTimeoutWhileCollecting() {
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0) SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setSize(0)
// setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set // setting the timeout is necessary only because we check that if a TimeExceededException is thrown, a timeout was set
@ -145,6 +171,56 @@ public class SearchTimeoutIT extends ESIntegTestCase {
}); });
} }
/**
* Test the scenario where the suggest phase (part of the query phase) times out, yet there are results
* available coming from executing the query and aggs on each shard.
*/
public void testSuggestTimeoutWithPartialResults() {
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.setGlobalText("text");
TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder();
suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder);
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").suggest(suggestBuilder)
.addAggregation(new TermsAggregationBuilder("terms").field("field.keyword"));
ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> {
assertThat(searchResponse.isTimedOut(), equalTo(true));
assertEquals(0, searchResponse.getShardFailures().length);
assertEquals(0, searchResponse.getFailedShards());
assertThat(searchResponse.getSuccessfulShards(), greaterThan(0));
assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards());
assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L));
assertThat(searchResponse.getHits().getHits().length, greaterThan(0));
StringTerms terms = searchResponse.getAggregations().get("terms");
assertEquals(1, terms.getBuckets().size());
StringTerms.Bucket bucket = terms.getBuckets().get(0);
assertEquals("value", bucket.getKeyAsString());
assertThat(bucket.getDocCount(), greaterThan(0L));
});
}
/**
* Test the scenario where the rescore phase (part of the query phase) times out, yet there are results
* available coming from executing the query and aggs on each shard.
*/
public void testRescoreTimeoutWithPartialResults() {
SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setRescorer(new TimeoutRescorerBuilder())
.addAggregation(new TermsAggregationBuilder("terms").field("field.keyword"));
ElasticsearchAssertions.assertResponse(searchRequestBuilder, searchResponse -> {
assertThat(searchResponse.isTimedOut(), equalTo(true));
assertEquals(0, searchResponse.getShardFailures().length);
assertEquals(0, searchResponse.getFailedShards());
assertThat(searchResponse.getSuccessfulShards(), greaterThan(0));
assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards());
assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L));
assertThat(searchResponse.getHits().getHits().length, greaterThan(0));
StringTerms terms = searchResponse.getAggregations().get("terms");
assertEquals(1, terms.getBuckets().size());
StringTerms.Bucket bucket = terms.getBuckets().get(0);
assertEquals("value", bucket.getKeyAsString());
assertThat(bucket.getDocCount(), greaterThan(0L));
});
}
public void testPartialResultsIntolerantTimeoutBeforeCollecting() { public void testPartialResultsIntolerantTimeoutBeforeCollecting() {
ElasticsearchException ex = expectThrows( ElasticsearchException ex = expectThrows(
ElasticsearchException.class, ElasticsearchException.class,
@ -171,13 +247,67 @@ public class SearchTimeoutIT extends ESIntegTestCase {
assertEquals(429, ex.status().getStatus()); assertEquals(429, ex.status().getStatus());
} }
public static final class BulkScorerTimeoutQueryPlugin extends Plugin implements SearchPlugin { public void testPartialResultsIntolerantTimeoutWhileSuggestingOnly() {
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.setGlobalText("text");
TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder();
suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder);
ElasticsearchException ex = expectThrows(
ElasticsearchException.class,
prepareSearch("test").suggest(suggestBuilder).setAllowPartialSearchResults(false) // this line causes timeouts to report
// failures
);
assertTrue(ex.toString().contains("Time exceeded"));
assertEquals(429, ex.status().getStatus());
}
public void testPartialResultsIntolerantTimeoutWhileSuggesting() {
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.setGlobalText("text");
TimeoutSuggestionBuilder timeoutSuggestionBuilder = new TimeoutSuggestionBuilder();
suggestBuilder.addSuggestion("suggest", timeoutSuggestionBuilder);
ElasticsearchException ex = expectThrows(
ElasticsearchException.class,
prepareSearch("test").setQuery(new TermQueryBuilder("field", "value"))
.suggest(suggestBuilder)
.setAllowPartialSearchResults(false) // this line causes timeouts to report failures
);
assertTrue(ex.toString().contains("Time exceeded"));
assertEquals(429, ex.status().getStatus());
}
public void testPartialResultsIntolerantTimeoutWhileRescoring() {
ElasticsearchException ex = expectThrows(
ElasticsearchException.class,
prepareSearch("test").setQuery(new TermQueryBuilder("field", "value"))
.setRescorer(new TimeoutRescorerBuilder())
.setAllowPartialSearchResults(false) // this line causes timeouts to report failures
);
assertTrue(ex.toString().contains("Time exceeded"));
assertEquals(429, ex.status().getStatus());
}
public static final class SearchTimeoutPlugin extends Plugin implements SearchPlugin {
@Override @Override
public List<QuerySpec<?>> getQueries() { public List<QuerySpec<?>> getQueries() {
return Collections.singletonList(new QuerySpec<QueryBuilder>("timeout", BulkScorerTimeoutQuery::new, parser -> { return Collections.singletonList(new QuerySpec<QueryBuilder>("timeout", BulkScorerTimeoutQuery::new, parser -> {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
})); }));
} }
@Override
public List<SuggesterSpec<?>> getSuggesters() {
return Collections.singletonList(new SuggesterSpec<>("timeout", TimeoutSuggestionBuilder::new, parser -> {
throw new UnsupportedOperationException();
}, TermSuggestion::new));
}
@Override
public List<RescorerSpec<?>> getRescorers() {
return Collections.singletonList(new RescorerSpec<>("timeout", TimeoutRescorerBuilder::new, parser -> {
throw new UnsupportedOperationException();
}));
}
} }
/** /**
@ -315,4 +445,111 @@ public class SearchTimeoutIT extends ESIntegTestCase {
return null; return null;
} }
} }
/**
* Suggestion builder that triggers a timeout as part of its execution
*/
private static final class TimeoutSuggestionBuilder extends TermSuggestionBuilder {
TimeoutSuggestionBuilder() {
super("field");
}
TimeoutSuggestionBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return "timeout";
}
@Override
public SuggestionSearchContext.SuggestionContext build(SearchExecutionContext context) {
return new TimeoutSuggestionContext(new TimeoutSuggester((ContextIndexSearcher) context.searcher()), context);
}
}
private static final class TimeoutSuggester extends Suggester<TimeoutSuggestionContext> {
private final ContextIndexSearcher contextIndexSearcher;
TimeoutSuggester(ContextIndexSearcher contextIndexSearcher) {
this.contextIndexSearcher = contextIndexSearcher;
}
@Override
protected TermSuggestion innerExecute(
String name,
TimeoutSuggestionContext suggestion,
IndexSearcher searcher,
CharsRefBuilder spare
) {
contextIndexSearcher.throwTimeExceededException();
assert false;
return new TermSuggestion(name, suggestion.getSize(), SortBy.SCORE);
}
@Override
protected TermSuggestion emptySuggestion(String name, TimeoutSuggestionContext suggestion, CharsRefBuilder spare) {
return new TermSuggestion(name, suggestion.getSize(), SortBy.SCORE);
}
}
private static final class TimeoutSuggestionContext extends SuggestionSearchContext.SuggestionContext {
TimeoutSuggestionContext(Suggester<?> suggester, SearchExecutionContext searchExecutionContext) {
super(suggester, searchExecutionContext);
}
}
private static final class TimeoutRescorerBuilder extends RescorerBuilder<TimeoutRescorerBuilder> {
TimeoutRescorerBuilder() {
super();
}
TimeoutRescorerBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
protected void doWriteTo(StreamOutput out) {}
@Override
protected void doXContent(XContentBuilder builder, Params params) {}
@Override
protected RescoreContext innerBuildContext(int windowSize, SearchExecutionContext context) throws IOException {
return new RescoreContext(10, new Rescorer() {
@Override
public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext rescoreContext) {
((ContextIndexSearcher) context.searcher()).throwTimeExceededException();
assert false;
return null;
}
@Override
public Explanation explain(
int topLevelDocId,
IndexSearcher searcher,
RescoreContext rescoreContext,
Explanation sourceExplanation
) {
throw new UnsupportedOperationException();
}
});
}
@Override
public String getWriteableName() {
return "timeout";
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return null;
}
@Override
public RescorerBuilder<TimeoutRescorerBuilder> rewrite(QueryRewriteContext ctx) {
return this;
}
}
} }

View file

@ -24,7 +24,6 @@ import org.elasticsearch.common.lucene.search.function.LeafScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction; import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.settings.Settings.Builder;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryBuilders;
@ -994,22 +993,6 @@ public class QueryRescorerIT extends ESIntegTestCase {
}); });
} }
public void testRescoreWithTimeout() throws Exception {
// no dummy docs since merges can change scores while we run queries.
int numDocs = indexRandomNumbers("whitespace", -1, false);
String intToEnglish = English.intToEnglish(between(0, numDocs - 1));
String query = intToEnglish.split(" ")[0];
assertResponse(
prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH)
.setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR))
.setSize(10)
.addRescorer(new QueryRescorerBuilder(functionScoreQuery(new TestTimedScoreFunctionBuilder())).windowSize(100))
.setTimeout(TimeValue.timeValueMillis(10)),
r -> assertTrue(r.isTimedOut())
);
}
@Override @Override
protected Collection<Class<? extends Plugin>> nodePlugins() { protected Collection<Class<? extends Plugin>> nodePlugins() {
return List.of(TestTimedQueryPlugin.class); return List.of(TestTimedQueryPlugin.class);

View file

@ -174,11 +174,13 @@ public class TransportVersions {
public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_0_00); public static final TransportVersion INFERENCE_REQUEST_ADAPTIVE_RATE_LIMITING = def(8_839_0_00);
public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_0_00); public static final TransportVersion ML_INFERENCE_IBM_WATSONX_RERANK_ADDED = def(8_840_0_00);
public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X = def(8_840_0_01); public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED_BACKPORT_8_X = def(8_840_0_01);
public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_BACKPORT_8_X = def(8_840_0_02);
public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_0_00); public static final TransportVersion ELASTICSEARCH_9_0 = def(9_000_0_00);
public static final TransportVersion REMOVE_SNAPSHOT_FAILURES_90 = def(9_000_0_01); public static final TransportVersion REMOVE_SNAPSHOT_FAILURES_90 = def(9_000_0_01);
public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90 = def(9_000_0_02); public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED_90 = def(9_000_0_02);
public static final TransportVersion REMOVE_DESIRED_NODE_VERSION_90 = def(9_000_0_03); public static final TransportVersion REMOVE_DESIRED_NODE_VERSION_90 = def(9_000_0_03);
public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION_90 = def(9_000_0_04); public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION_90 = def(9_000_0_04);
public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR_9_0 = def(9_000_0_05);
public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00); public static final TransportVersion COHERE_BIT_EMBEDDING_TYPE_SUPPORT_ADDED = def(9_001_0_00);
public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00); public static final TransportVersion REMOVE_SNAPSHOT_FAILURES = def(9_002_0_00);
public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00); public static final TransportVersion TRANSPORT_STATS_HANDLING_TIME_REQUIRED = def(9_003_0_00);
@ -186,6 +188,9 @@ public class TransportVersions {
public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION = def(9_005_0_00); public static final TransportVersion ESQL_DRIVER_TASK_DESCRIPTION = def(9_005_0_00);
public static final TransportVersion ESQL_RETRY_ON_SHARD_LEVEL_FAILURE = def(9_006_0_00); public static final TransportVersion ESQL_RETRY_ON_SHARD_LEVEL_FAILURE = def(9_006_0_00);
public static final TransportVersion ESQL_PROFILE_ASYNC_NANOS = def(9_007_00_0); public static final TransportVersion ESQL_PROFILE_ASYNC_NANOS = def(9_007_00_0);
public static final TransportVersion ESQL_LOOKUP_JOIN_SOURCE_TEXT = def(9_008_0_00);
public static final TransportVersion REMOVE_ALL_APPLICABLE_SELECTOR = def(9_009_0_00);
public static final TransportVersion SLM_UNHEALTHY_IF_NO_SNAPSHOT_WITHIN = def(9_010_0_00);
/* /*
* WARNING: DO NOT MERGE INTO MAIN! * WARNING: DO NOT MERGE INTO MAIN!
@ -208,6 +213,8 @@ public class TransportVersions {
* A new transport version should be added EVERY TIME a change is made to the serialization protocol of one or more classes. Each * A new transport version should be added EVERY TIME a change is made to the serialization protocol of one or more classes. Each
* transport version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, V_8_8_1). * transport version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, V_8_8_1).
* *
* More information about versions and backporting at docs/internal/Versioning.md
*
* ADDING A TRANSPORT VERSION * ADDING A TRANSPORT VERSION
* To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines, * To add a new transport version, add a new constant at the bottom of the list, above this comment. Don't add other lines,
* comments, etc. The version id has the following layout: * comments, etc. The version id has the following layout:

View file

@ -12,6 +12,7 @@ package org.elasticsearch.action.admin.indices.forcemerge;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
@ -96,12 +97,16 @@ public class TransportForceMergeAction extends TransportBroadcastByNodeAction<
ActionListener<TransportBroadcastByNodeAction.EmptyResult> listener ActionListener<TransportBroadcastByNodeAction.EmptyResult> listener
) { ) {
assert (task instanceof CancellableTask) == false; // TODO: add cancellation handling here once the task supports it assert (task instanceof CancellableTask) == false; // TODO: add cancellation handling here once the task supports it
threadPool.executor(ThreadPool.Names.FORCE_MERGE).execute(ActionRunnable.supply(listener, () -> { SubscribableListener.<IndexShard>newForked(l -> {
IndexShard indexShard = indicesService.indexServiceSafe(shardRouting.shardId().getIndex()) IndexShard indexShard = indicesService.indexServiceSafe(shardRouting.shardId().getIndex())
.getShard(shardRouting.shardId().id()); .getShard(shardRouting.shardId().id());
indexShard.forceMerge(request); indexShard.ensureMutable(l.map(unused -> indexShard));
return EmptyResult.INSTANCE; }).<EmptyResult>andThen((l, indexShard) -> {
})); threadPool.executor(ThreadPool.Names.FORCE_MERGE).execute(ActionRunnable.supply(l, () -> {
indexShard.forceMerge(request);
return EmptyResult.INSTANCE;
}));
}).addListener(listener);
} }
/** /**

View file

@ -656,10 +656,6 @@ public class ResolveIndexAction extends ActionType<ResolveIndexAction.Response>
: switch (resolvedExpression.selector()) { : switch (resolvedExpression.selector()) {
case DATA -> dataStream.getDataComponent().getIndices().stream(); case DATA -> dataStream.getDataComponent().getIndices().stream();
case FAILURES -> dataStream.getFailureIndices().stream(); case FAILURES -> dataStream.getFailureIndices().stream();
case ALL_APPLICABLE -> Stream.concat(
dataStream.getIndices().stream(),
dataStream.getFailureIndices().stream()
);
}; };
String[] backingIndices = dataStreamIndices.map(Index::getName).toArray(String[]::new); String[] backingIndices = dataStreamIndices.map(Index::getName).toArray(String[]::new);
dataStreams.add(new ResolvedDataStream(dataStream.getName(), backingIndices, DataStream.TIMESTAMP_FIELD_NAME)); dataStreams.add(new ResolvedDataStream(dataStream.getName(), backingIndices, DataStream.TIMESTAMP_FIELD_NAME));
@ -684,13 +680,6 @@ public class ResolveIndexAction extends ActionType<ResolveIndexAction.Response>
assert ia.isDataStreamRelated() : "Illegal selector [failures] used on non data stream alias"; assert ia.isDataStreamRelated() : "Illegal selector [failures] used on non data stream alias";
yield ia.getFailureIndices(metadata).stream(); yield ia.getFailureIndices(metadata).stream();
} }
case ALL_APPLICABLE -> {
if (ia.isDataStreamRelated()) {
yield Stream.concat(ia.getIndices().stream(), ia.getFailureIndices(metadata).stream());
} else {
yield ia.getIndices().stream();
}
}
}; };
} }
return aliasIndices; return aliasIndices;

View file

@ -13,14 +13,13 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndexComponentSelector;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.ResolvedExpression;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SelectorResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.SelectorResolver;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskId;
@ -126,14 +125,12 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
); );
} }
// Ensure we have a valid selector in the request
if (rolloverTarget != null) { if (rolloverTarget != null) {
ResolvedExpression resolvedExpression = SelectorResolver.parseExpression(rolloverTarget, indicesOptions); try {
IndexComponentSelector selector = resolvedExpression.selector(); SelectorResolver.parseExpression(rolloverTarget, indicesOptions);
if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { } catch (InvalidIndexNameException exception) {
validationException = addValidationError( validationException = addValidationError(exception.getMessage(), validationException);
"rollover cannot be applied to both regular and failure indices at the same time",
validationException
);
} }
} }

View file

@ -12,7 +12,6 @@ package org.elasticsearch.action.search;
import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator; import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.common.util.Countable;
import org.elasticsearch.common.util.PlainIterator; import org.elasticsearch.common.util.PlainIterator;
import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.TimeValue;
@ -29,7 +28,7 @@ import java.util.Objects;
* the cluster alias. * the cluster alias.
* @see OriginalIndices * @see OriginalIndices
*/ */
public final class SearchShardIterator implements Comparable<SearchShardIterator>, Countable { public final class SearchShardIterator implements Comparable<SearchShardIterator> {
private final OriginalIndices originalIndices; private final OriginalIndices originalIndices;
private final String clusterAlias; private final String clusterAlias;
@ -171,7 +170,6 @@ public final class SearchShardIterator implements Comparable<SearchShardIterator
* *
* @return number of shard routing instances in this iterator * @return number of shard routing instances in this iterator
*/ */
@Override
public int size() { public int size() {
return targetNodesIterator.size(); return targetNodesIterator.size();
} }

View file

@ -9,6 +9,7 @@
package org.elasticsearch.action.support; package org.elasticsearch.action.support;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
@ -23,14 +24,11 @@ import java.util.Map;
* We define as index components the two different sets of indices a data stream could consist of: * We define as index components the two different sets of indices a data stream could consist of:
* - DATA: represents the backing indices * - DATA: represents the backing indices
* - FAILURES: represent the failing indices * - FAILURES: represent the failing indices
* - ALL: represents all available in this expression components, meaning if it's a data stream both backing and failure indices and if it's
* an index only the index itself.
* Note: An index is its own DATA component, but it cannot have a FAILURE component. * Note: An index is its own DATA component, but it cannot have a FAILURE component.
*/ */
public enum IndexComponentSelector implements Writeable { public enum IndexComponentSelector implements Writeable {
DATA("data", (byte) 0), DATA("data", (byte) 0),
FAILURES("failures", (byte) 1), FAILURES("failures", (byte) 1);
ALL_APPLICABLE("*", (byte) 2);
private final String key; private final String key;
private final byte id; private final byte id;
@ -75,7 +73,15 @@ public enum IndexComponentSelector implements Writeable {
} }
public static IndexComponentSelector read(StreamInput in) throws IOException { public static IndexComponentSelector read(StreamInput in) throws IOException {
return getById(in.readByte()); byte id = in.readByte();
if (in.getTransportVersion().onOrAfter(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR)
|| in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR_9_0)
|| in.getTransportVersion().isPatchFrom(TransportVersions.REMOVE_ALL_APPLICABLE_SELECTOR_BACKPORT_8_X)) {
return getById(id);
} else {
// Legacy value ::*, converted to ::data
return id == 2 ? DATA : getById(id);
}
} }
// Visible for testing // Visible for testing
@ -95,10 +101,10 @@ public enum IndexComponentSelector implements Writeable {
} }
public boolean shouldIncludeData() { public boolean shouldIncludeData() {
return this == ALL_APPLICABLE || this == DATA; return this == DATA;
} }
public boolean shouldIncludeFailures() { public boolean shouldIncludeFailures() {
return this == ALL_APPLICABLE || this == FAILURES; return this == FAILURES;
} }
} }

View file

@ -23,6 +23,7 @@ import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction; import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction;
import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.client.internal.node.NodeClient;
@ -107,7 +108,10 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
@Override @Override
protected Executor executor(ShardId shardId) { protected Executor executor(ShardId shardId) {
final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); return executor(indicesService.indexServiceSafe(shardId.getIndex()));
}
private Executor executor(IndexService indexService) {
return threadPool.executor(indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE); return threadPool.executor(indexService.getIndexSettings().getIndexMetadata().isSystem() ? Names.SYSTEM_WRITE : Names.WRITE);
} }
@ -201,136 +205,148 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
final IndexShard indexShard = indexService.getShard(shardId.getId()); final IndexShard indexShard = indexService.getShard(shardId.getId());
final MappingLookup mappingLookup = indexShard.mapperService().mappingLookup(); final MappingLookup mappingLookup = indexShard.mapperService().mappingLookup();
final UpdateHelper.Result result = deleteInferenceResults(
request,
updateHelper.prepare(request, indexShard, threadPool::absoluteTimeInMillis),
indexService.getMetadata(),
mappingLookup
);
switch (result.getResponseResult()) { var executor = executor(indexService);
case CREATED -> { assert ThreadPool.assertCurrentThreadPool(Names.SYSTEM_WRITE, Names.WRITE);
IndexRequest upsertRequest = result.action();
// we fetch it from the index request so we don't generate the bytes twice, its already done in the index request SubscribableListener.newForked(indexShard::ensureMutable)
final BytesReference upsertSourceBytes = upsertRequest.source(); // Make sure to fork back to a `write` thread pool if necessary
client.bulk( .<UpdateHelper.Result>andThen(executor, threadPool.getThreadContext(), (l, unused) -> ActionListener.completeWith(l, () -> {
toSingleItemBulkRequest(upsertRequest), assert ThreadPool.assertCurrentThreadPool(Names.SYSTEM_WRITE, Names.WRITE);
unwrappingSingleItemBulkResponse(ActionListener.<DocWriteResponse>wrap(response -> { return deleteInferenceResults(
UpdateResponse update = new UpdateResponse( request,
response.getShardInfo(), updateHelper.prepare(request, indexShard, threadPool::absoluteTimeInMillis), // Gets the doc using the engine
response.getShardId(), indexService.getMetadata(),
response.getId(), mappingLookup
response.getSeqNo(), );
response.getPrimaryTerm(), }))
response.getVersion(), // Proceed with a single item bulk request
response.getResult() .<UpdateResponse>andThen((l, result) -> {
); switch (result.getResponseResult()) {
if (request.fetchSource() != null && request.fetchSource().fetchSource()) { case CREATED -> {
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap( IndexRequest upsertRequest = result.action();
upsertSourceBytes, // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
true, final BytesReference upsertSourceBytes = upsertRequest.source();
upsertRequest.getContentType() client.bulk(
); toSingleItemBulkRequest(upsertRequest),
update.setGetResult( unwrappingSingleItemBulkResponse(ActionListener.<DocWriteResponse>wrap(response -> {
UpdateHelper.extractGetResult( UpdateResponse update = new UpdateResponse(
request, response.getShardInfo(),
request.concreteIndex(), response.getShardId(),
mappingLookup, response.getId(),
response.getSeqNo(), response.getSeqNo(),
response.getPrimaryTerm(), response.getPrimaryTerm(),
response.getVersion(), response.getVersion(),
sourceAndContent.v2(), response.getResult()
sourceAndContent.v1(), );
upsertSourceBytes if (request.fetchSource() != null && request.fetchSource().fetchSource()) {
) Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(
); upsertSourceBytes,
} else { true,
update.setGetResult(null); upsertRequest.getContentType()
} );
update.setForcedRefresh(response.forcedRefresh()); update.setGetResult(
listener.onResponse(update); UpdateHelper.extractGetResult(
}, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount))) request,
); request.concreteIndex(),
} mappingLookup,
case UPDATED -> { response.getSeqNo(),
IndexRequest indexRequest = result.action(); response.getPrimaryTerm(),
// we fetch it from the index request so we don't generate the bytes twice, its already done in the index request response.getVersion(),
final BytesReference indexSourceBytes = indexRequest.source(); sourceAndContent.v2(),
client.bulk( sourceAndContent.v1(),
toSingleItemBulkRequest(indexRequest), upsertSourceBytes
unwrappingSingleItemBulkResponse(ActionListener.<DocWriteResponse>wrap(response -> { )
UpdateResponse update = new UpdateResponse( );
response.getShardInfo(), } else {
response.getShardId(), update.setGetResult(null);
response.getId(), }
response.getSeqNo(), update.setForcedRefresh(response.forcedRefresh());
response.getPrimaryTerm(), l.onResponse(update);
response.getVersion(), }, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount)))
response.getResult()
); );
update.setGetResult(
UpdateHelper.extractGetResult(
request,
request.concreteIndex(),
mappingLookup,
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
result.updatedSourceAsMap(),
result.updateSourceContentType(),
indexSourceBytes
)
);
update.setForcedRefresh(response.forcedRefresh());
listener.onResponse(update);
}, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount)))
);
}
case DELETED -> {
DeleteRequest deleteRequest = result.action();
client.bulk(
toSingleItemBulkRequest(deleteRequest),
unwrappingSingleItemBulkResponse(ActionListener.<DeleteResponse>wrap(response -> {
UpdateResponse update = new UpdateResponse(
response.getShardInfo(),
response.getShardId(),
response.getId(),
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
response.getResult()
);
update.setGetResult(
UpdateHelper.extractGetResult(
request,
request.concreteIndex(),
mappingLookup,
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
result.updatedSourceAsMap(),
result.updateSourceContentType(),
null
)
);
update.setForcedRefresh(response.forcedRefresh());
listener.onResponse(update);
}, exception -> handleUpdateFailureWithRetry(listener, request, exception, retryCount)))
);
}
case NOOP -> {
UpdateResponse update = result.action();
IndexService indexServiceOrNull = indicesService.indexService(shardId.getIndex());
if (indexServiceOrNull != null) {
IndexShard shard = indexService.getShardOrNull(shardId.getId());
if (shard != null) {
shard.noopUpdate();
} }
case UPDATED -> {
IndexRequest indexRequest = result.action();
// we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
final BytesReference indexSourceBytes = indexRequest.source();
client.bulk(
toSingleItemBulkRequest(indexRequest),
unwrappingSingleItemBulkResponse(ActionListener.<DocWriteResponse>wrap(response -> {
UpdateResponse update = new UpdateResponse(
response.getShardInfo(),
response.getShardId(),
response.getId(),
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
response.getResult()
);
update.setGetResult(
UpdateHelper.extractGetResult(
request,
request.concreteIndex(),
mappingLookup,
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
result.updatedSourceAsMap(),
result.updateSourceContentType(),
indexSourceBytes
)
);
update.setForcedRefresh(response.forcedRefresh());
l.onResponse(update);
}, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount)))
);
}
case DELETED -> {
DeleteRequest deleteRequest = result.action();
client.bulk(
toSingleItemBulkRequest(deleteRequest),
unwrappingSingleItemBulkResponse(ActionListener.<DeleteResponse>wrap(response -> {
UpdateResponse update = new UpdateResponse(
response.getShardInfo(),
response.getShardId(),
response.getId(),
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
response.getResult()
);
update.setGetResult(
UpdateHelper.extractGetResult(
request,
request.concreteIndex(),
mappingLookup,
response.getSeqNo(),
response.getPrimaryTerm(),
response.getVersion(),
result.updatedSourceAsMap(),
result.updateSourceContentType(),
null
)
);
update.setForcedRefresh(response.forcedRefresh());
l.onResponse(update);
}, exception -> handleUpdateFailureWithRetry(l, request, exception, retryCount)))
);
}
case NOOP -> {
UpdateResponse update = result.action();
IndexService indexServiceOrNull = indicesService.indexService(shardId.getIndex());
if (indexServiceOrNull != null) {
IndexShard shard = indexService.getShardOrNull(shardId.getId());
if (shard != null) {
shard.noopUpdate();
}
}
l.onResponse(update);
}
default -> throw new IllegalStateException("Illegal result " + result.getResponseResult());
} }
listener.onResponse(update); })
} .addListener(listener);
default -> throw new IllegalStateException("Illegal result " + result.getResponseResult());
}
} }
private void handleUpdateFailureWithRetry( private void handleUpdateFailureWithRetry(

View file

@ -249,7 +249,8 @@ class Elasticsearch {
nodeEnv.configDir(), nodeEnv.configDir(),
nodeEnv.tmpDir() nodeEnv.tmpDir()
); );
} else if (RuntimeVersionFeature.isSecurityManagerAvailable()) { } else {
assert RuntimeVersionFeature.isSecurityManagerAvailable();
// no need to explicitly enable native access for legacy code // no need to explicitly enable native access for legacy code
pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of()); pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of());
// install SM after natives, shutdown hooks, etc. // install SM after natives, shutdown hooks, etc.
@ -259,10 +260,6 @@ class Elasticsearch {
SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()), SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(args.nodeSettings()),
args.pidFile() args.pidFile()
); );
} else {
// TODO: should we throw/interrupt startup in this case?
pluginsLoader = PluginsLoader.createPluginsLoader(modulesBundles, pluginsBundles, Map.of());
LogManager.getLogger(Elasticsearch.class).warn("Bootstrapping without any protection");
} }
bootstrap.setPluginsLoader(pluginsLoader); bootstrap.setPluginsLoader(pluginsLoader);

View file

@ -156,9 +156,8 @@ public record AutoExpandReplicas(int minReplicas, int maxReplicas, boolean enabl
)) { )) {
if (indexMetadata.getNumberOfReplicas() == 0) { if (indexMetadata.getNumberOfReplicas() == 0) {
nrReplicasChanged.computeIfAbsent(1, ArrayList::new).add(indexMetadata.getIndex().getName()); nrReplicasChanged.computeIfAbsent(1, ArrayList::new).add(indexMetadata.getIndex().getName());
} else {
continue;
} }
continue;
} }
if (allocation == null) { if (allocation == null) {
allocation = allocationSupplier.get(); allocation = allocationSupplier.get();

View file

@ -81,8 +81,7 @@ public class IndexAbstractionResolver {
indexNameExpressionResolver, indexNameExpressionResolver,
includeDataStreams includeDataStreams
)) { )) {
// Resolve any ::* suffixes on the expression. We need to resolve them all to their final valid selectors resolveSelectorsAndCollect(authorizedIndex, selectorString, indicesOptions, resolvedIndices, projectMetadata);
resolveSelectorsAndCombine(authorizedIndex, selectorString, indicesOptions, resolvedIndices, projectMetadata);
} }
} }
if (resolvedIndices.isEmpty()) { if (resolvedIndices.isEmpty()) {
@ -98,9 +97,8 @@ public class IndexAbstractionResolver {
} }
} }
} else { } else {
// Resolve any ::* suffixes on the expression. We need to resolve them all to their final valid selectors
Set<String> resolvedIndices = new HashSet<>(); Set<String> resolvedIndices = new HashSet<>();
resolveSelectorsAndCombine(indexAbstraction, selectorString, indicesOptions, resolvedIndices, projectMetadata); resolveSelectorsAndCollect(indexAbstraction, selectorString, indicesOptions, resolvedIndices, projectMetadata);
if (minus) { if (minus) {
finalIndices.removeAll(resolvedIndices); finalIndices.removeAll(resolvedIndices);
} else if (indicesOptions.ignoreUnavailable() == false || isAuthorized.test(indexAbstraction)) { } else if (indicesOptions.ignoreUnavailable() == false || isAuthorized.test(indexAbstraction)) {
@ -114,7 +112,7 @@ public class IndexAbstractionResolver {
return finalIndices; return finalIndices;
} }
private static void resolveSelectorsAndCombine( private static void resolveSelectorsAndCollect(
String indexAbstraction, String indexAbstraction,
String selectorString, String selectorString,
IndicesOptions indicesOptions, IndicesOptions indicesOptions,
@ -132,19 +130,8 @@ public class IndexAbstractionResolver {
selectorString = IndexComponentSelector.DATA.getKey(); selectorString = IndexComponentSelector.DATA.getKey();
} }
if (Regex.isMatchAllPattern(selectorString)) { // A selector is always passed along as-is, it's validity for this kind of abstraction is tested later
// Always accept data collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, selectorString));
collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, IndexComponentSelector.DATA.getKey()));
// Only put failures on the expression if the abstraction supports it.
if (acceptsAllSelectors) {
collect.add(
IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, IndexComponentSelector.FAILURES.getKey())
);
}
} else {
// A non-wildcard selector is always passed along as-is, it's validity for this kind of abstraction is tested later
collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, selectorString));
}
} else { } else {
assert selectorString == null assert selectorString == null
: "A selector string [" + selectorString + "] is present but selectors are disabled in this context"; : "A selector string [" + selectorString + "] is present but selectors are disabled in this context";

View file

@ -2072,6 +2072,12 @@ public class IndexMetadata implements Diffable<IndexMetadata>, ToXContentFragmen
return this; return this;
} }
public Builder putRolloverInfos(Map<String, RolloverInfo> rolloverInfos) {
this.rolloverInfos.clear();
this.rolloverInfos.putAllFromMap(rolloverInfos);
return this;
}
public long version() { public long version() {
return this.version; return this.version;
} }

View file

@ -433,21 +433,9 @@ public class IndexNameExpressionResolver {
} }
} else { } else {
if (isExclusion) { if (isExclusion) {
if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { resources.remove(new ResolvedExpression(baseExpression, selector));
resources.remove(new ResolvedExpression(baseExpression, IndexComponentSelector.DATA));
resources.remove(new ResolvedExpression(baseExpression, IndexComponentSelector.FAILURES));
} else {
resources.remove(new ResolvedExpression(baseExpression, selector));
}
} else if (ensureAliasOrIndexExists(context, baseExpression, selector)) { } else if (ensureAliasOrIndexExists(context, baseExpression, selector)) {
if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) { resources.add(new ResolvedExpression(baseExpression, selector));
resources.add(new ResolvedExpression(baseExpression, IndexComponentSelector.DATA));
if (context.getProject().getIndicesLookup().get(baseExpression).isDataStreamRelated()) {
resources.add(new ResolvedExpression(baseExpression, IndexComponentSelector.FAILURES));
}
} else {
resources.add(new ResolvedExpression(baseExpression, selector));
}
} }
} }
} }
@ -1279,8 +1267,7 @@ public class IndexNameExpressionResolver {
private static boolean resolvedExpressionsContainsAbstraction(Set<ResolvedExpression> resolvedExpressions, String abstractionName) { private static boolean resolvedExpressionsContainsAbstraction(Set<ResolvedExpression> resolvedExpressions, String abstractionName) {
return resolvedExpressions.contains(new ResolvedExpression(abstractionName)) return resolvedExpressions.contains(new ResolvedExpression(abstractionName))
|| resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.DATA)) || resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.DATA));
|| resolvedExpressions.contains(new ResolvedExpression(abstractionName, IndexComponentSelector.ALL_APPLICABLE));
} }
/** /**
@ -1585,8 +1572,7 @@ public class IndexNameExpressionResolver {
if (context.options.allowSelectors()) { if (context.options.allowSelectors()) {
// Ensure that the selectors are present and that they are compatible with the abstractions they are used with // Ensure that the selectors are present and that they are compatible with the abstractions they are used with
assert selector != null : "Earlier logic should have parsed selectors or added the default selectors already"; assert selector != null : "Earlier logic should have parsed selectors or added the default selectors already";
// Check if ::failures has been explicitly requested, since requesting ::* for non-data-stream abstractions would just // Check if ::failures has been explicitly requested
// return their data components.
if (IndexComponentSelector.FAILURES.equals(selector) && indexAbstraction.isDataStreamRelated() == false) { if (IndexComponentSelector.FAILURES.equals(selector) && indexAbstraction.isDataStreamRelated() == false) {
// If requested abstraction is not data stream related, then you cannot use ::failures // If requested abstraction is not data stream related, then you cannot use ::failures
if (ignoreUnavailable) { if (ignoreUnavailable) {
@ -1942,9 +1928,9 @@ public class IndexNameExpressionResolver {
final IndexMetadata.State excludeState = excludeState(context.getOptions()); final IndexMetadata.State excludeState = excludeState(context.getOptions());
Set<ResolvedExpression> resources = new HashSet<>(); Set<ResolvedExpression> resources = new HashSet<>();
if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) {
expandToApplicableSelectors(indexAbstraction, selector, resources); resources.add(new ResolvedExpression(indexAbstraction.getName(), selector));
} else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) {
expandToApplicableSelectors(indexAbstraction, selector, resources); resources.add(new ResolvedExpression(indexAbstraction.getName(), selector));
} else { } else {
if (shouldIncludeRegularIndices(context.getOptions(), selector)) { if (shouldIncludeRegularIndices(context.getOptions(), selector)) {
for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) {
@ -1971,31 +1957,6 @@ public class IndexNameExpressionResolver {
return resources; return resources;
} }
/**
* Adds the abstraction and selector to the results when preserving data streams and aliases at wildcard resolution. If a selector
* is provided, the result is only added if the selector is applicable to the abstraction provided. If
* {@link IndexComponentSelector#ALL_APPLICABLE} is given, the selectors are expanded only to those which are applicable to the
* provided abstraction.
* @param indexAbstraction abstraction to add
* @param selector The selector to add
* @param resources Result collector which is updated with all applicable resolved expressions for a given abstraction and selector
* pair.
*/
private static void expandToApplicableSelectors(
IndexAbstraction indexAbstraction,
IndexComponentSelector selector,
Set<ResolvedExpression> resources
) {
if (IndexComponentSelector.ALL_APPLICABLE.equals(selector)) {
resources.add(new ResolvedExpression(indexAbstraction.getName(), IndexComponentSelector.DATA));
if (indexAbstraction.isDataStreamRelated()) {
resources.add(new ResolvedExpression(indexAbstraction.getName(), IndexComponentSelector.FAILURES));
}
} else if (selector == null || indexAbstraction.isDataStreamRelated() || selector.shouldIncludeFailures() == false) {
resources.add(new ResolvedExpression(indexAbstraction.getName(), selector));
}
}
private static List<ResolvedExpression> resolveEmptyOrTrivialWildcard(Context context, IndexComponentSelector selector) { private static List<ResolvedExpression> resolveEmptyOrTrivialWildcard(Context context, IndexComponentSelector selector) {
final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getProject(), selector); final String[] allIndices = resolveEmptyOrTrivialWildcardToAllIndices(context.getOptions(), context.getProject(), selector);
List<String> indices; List<String> indices;
@ -2388,20 +2349,10 @@ public class IndexNameExpressionResolver {
String suffix = expression.substring(lastDoubleColon + SELECTOR_SEPARATOR.length()); String suffix = expression.substring(lastDoubleColon + SELECTOR_SEPARATOR.length());
IndexComponentSelector selector = IndexComponentSelector.getByKey(suffix); IndexComponentSelector selector = IndexComponentSelector.getByKey(suffix);
if (selector == null) { if (selector == null) {
// Do some work to surface a helpful error message for likely errors throw new InvalidIndexNameException(
if (Regex.isSimpleMatchPattern(suffix)) { expression,
throw new InvalidIndexNameException( "invalid usage of :: separator, [" + suffix + "] is not a recognized selector"
expression, );
"Invalid usage of :: separator, ["
+ suffix
+ "] contains a wildcard, but only the match all wildcard [*] is supported in a selector"
);
} else {
throw new InvalidIndexNameException(
expression,
"Invalid usage of :: separator, [" + suffix + "] is not a recognized selector"
);
}
} }
String expressionBase = expression.substring(0, lastDoubleColon); String expressionBase = expression.substring(0, lastDoubleColon);
ensureNoMoreSelectorSeparators(expressionBase, expression); ensureNoMoreSelectorSeparators(expressionBase, expression);

View file

@ -8,26 +8,12 @@
*/ */
package org.elasticsearch.cluster.routing; package org.elasticsearch.cluster.routing;
import org.elasticsearch.common.util.Countable;
import java.util.List; import java.util.List;
/** /**
* Allows to iterate over unrelated shards. * Allows to iterate over unrelated shards.
*/ */
public interface ShardsIterator extends Iterable<ShardRouting>, Countable { public interface ShardsIterator extends Iterable<ShardRouting> {
/**
* Resets the iterator to its initial state.
*/
void reset();
/**
* The number of shard routing instances.
*
* @return number of shard routing instances in this iterator
*/
int size();
/** /**
* The number of active shard routing instances * The number of active shard routing instances
@ -41,13 +27,6 @@ public interface ShardsIterator extends Iterable<ShardRouting>, Countable {
*/ */
ShardRouting nextOrNull(); ShardRouting nextOrNull();
/**
* Return the number of shards remaining in this {@link ShardsIterator}
*
* @return number of shard remaining
*/
int remaining();
@Override @Override
int hashCode(); int hashCode();

View file

@ -11,6 +11,7 @@ package org.elasticsearch.cluster.routing.allocation.allocator;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight; import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight;
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.DoubleWithAttributes;
import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.telemetry.metric.LongWithAttributes;
import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.metric.MeterRegistry;
@ -28,17 +29,28 @@ import java.util.concurrent.atomic.AtomicReference;
*/ */
public class DesiredBalanceMetrics { public class DesiredBalanceMetrics {
/**
* @param unassignedShards Shards that are not assigned to any node.
* @param totalAllocations Shards that are assigned to a node.
* @param undesiredAllocationsExcludingShuttingDownNodes Shards that are assigned to a node but must move to alleviate a resource
* constraint per the {@link AllocationDeciders}. Excludes shards that must move
* because of a node shutting down.
*/
public record AllocationStats(long unassignedShards, long totalAllocations, long undesiredAllocationsExcludingShuttingDownNodes) {} public record AllocationStats(long unassignedShards, long totalAllocations, long undesiredAllocationsExcludingShuttingDownNodes) {}
public record NodeWeightStats(long shardCount, double diskUsageInBytes, double writeLoad, double nodeWeight) {} public record NodeWeightStats(long shardCount, double diskUsageInBytes, double writeLoad, double nodeWeight) {}
public static final DesiredBalanceMetrics NOOP = new DesiredBalanceMetrics(MeterRegistry.NOOP); // Reconciliation metrics.
/** See {@link #unassignedShards} */
public static final String UNASSIGNED_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.unassigned.current"; public static final String UNASSIGNED_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.unassigned.current";
/** See {@link #totalAllocations} */
public static final String TOTAL_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.current"; public static final String TOTAL_SHARDS_METRIC_NAME = "es.allocator.desired_balance.shards.current";
/** See {@link #undesiredAllocationsExcludingShuttingDownNodes} */
public static final String UNDESIRED_ALLOCATION_COUNT_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.current"; public static final String UNDESIRED_ALLOCATION_COUNT_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.current";
/** {@link #UNDESIRED_ALLOCATION_COUNT_METRIC_NAME} / {@link #TOTAL_SHARDS_METRIC_NAME} */
public static final String UNDESIRED_ALLOCATION_RATIO_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.ratio"; public static final String UNDESIRED_ALLOCATION_RATIO_METRIC_NAME = "es.allocator.desired_balance.allocations.undesired.ratio";
// Desired balance node metrics.
public static final String DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME = "es.allocator.desired_balance.allocations.node_weight.current"; public static final String DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME = "es.allocator.desired_balance.allocations.node_weight.current";
public static final String DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME = public static final String DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME =
"es.allocator.desired_balance.allocations.node_shard_count.current"; "es.allocator.desired_balance.allocations.node_shard_count.current";
@ -47,6 +59,7 @@ public class DesiredBalanceMetrics {
public static final String DESIRED_BALANCE_NODE_DISK_USAGE_METRIC_NAME = public static final String DESIRED_BALANCE_NODE_DISK_USAGE_METRIC_NAME =
"es.allocator.desired_balance.allocations.node_disk_usage_bytes.current"; "es.allocator.desired_balance.allocations.node_disk_usage_bytes.current";
// Node weight metrics.
public static final String CURRENT_NODE_WEIGHT_METRIC_NAME = "es.allocator.allocations.node.weight.current"; public static final String CURRENT_NODE_WEIGHT_METRIC_NAME = "es.allocator.allocations.node.weight.current";
public static final String CURRENT_NODE_SHARD_COUNT_METRIC_NAME = "es.allocator.allocations.node.shard_count.current"; public static final String CURRENT_NODE_SHARD_COUNT_METRIC_NAME = "es.allocator.allocations.node.shard_count.current";
public static final String CURRENT_NODE_WRITE_LOAD_METRIC_NAME = "es.allocator.allocations.node.write_load.current"; public static final String CURRENT_NODE_WRITE_LOAD_METRIC_NAME = "es.allocator.allocations.node.write_load.current";
@ -59,6 +72,7 @@ public class DesiredBalanceMetrics {
public static final AllocationStats EMPTY_ALLOCATION_STATS = new AllocationStats(-1, -1, -1); public static final AllocationStats EMPTY_ALLOCATION_STATS = new AllocationStats(-1, -1, -1);
private volatile boolean nodeIsMaster = false; private volatile boolean nodeIsMaster = false;
/** /**
* Number of unassigned shards during last reconciliation * Number of unassigned shards during last reconciliation
*/ */
@ -70,9 +84,10 @@ public class DesiredBalanceMetrics {
private volatile long totalAllocations; private volatile long totalAllocations;
/** /**
* Number of assigned shards during last reconciliation that are not allocated on desired node and need to be moved * Number of assigned shards during last reconciliation that are not allocated on a desired node and need to be moved.
* This excludes shards that must be reassigned due to a shutting down node.
*/ */
private volatile long undesiredAllocations; private volatile long undesiredAllocationsExcludingShuttingDownNodes;
private final AtomicReference<Map<DiscoveryNode, NodeWeightStats>> weightStatsPerNodeRef = new AtomicReference<>(Map.of()); private final AtomicReference<Map<DiscoveryNode, NodeWeightStats>> weightStatsPerNodeRef = new AtomicReference<>(Map.of());
private final AtomicReference<Map<DiscoveryNode, NodeAllocationStatsAndWeight>> allocationStatsPerNodeRef = new AtomicReference<>( private final AtomicReference<Map<DiscoveryNode, NodeAllocationStatsAndWeight>> allocationStatsPerNodeRef = new AtomicReference<>(
@ -89,7 +104,7 @@ public class DesiredBalanceMetrics {
if (allocationStats != EMPTY_ALLOCATION_STATS) { if (allocationStats != EMPTY_ALLOCATION_STATS) {
this.unassignedShards = allocationStats.unassignedShards; this.unassignedShards = allocationStats.unassignedShards;
this.totalAllocations = allocationStats.totalAllocations; this.totalAllocations = allocationStats.totalAllocations;
this.undesiredAllocations = allocationStats.undesiredAllocationsExcludingShuttingDownNodes; this.undesiredAllocationsExcludingShuttingDownNodes = allocationStats.undesiredAllocationsExcludingShuttingDownNodes;
} }
weightStatsPerNodeRef.set(weightStatsPerNode); weightStatsPerNodeRef.set(weightStatsPerNode);
allocationStatsPerNodeRef.set(nodeAllocationStats); allocationStatsPerNodeRef.set(nodeAllocationStats);
@ -107,7 +122,7 @@ public class DesiredBalanceMetrics {
UNDESIRED_ALLOCATION_COUNT_METRIC_NAME, UNDESIRED_ALLOCATION_COUNT_METRIC_NAME,
"Total number of shards allocated on undesired nodes excluding shutting down nodes", "Total number of shards allocated on undesired nodes excluding shutting down nodes",
"{shard}", "{shard}",
this::getUndesiredAllocationsMetrics this::getUndesiredAllocationsExcludingShuttingDownNodesMetrics
); );
meterRegistry.registerDoublesGauge( meterRegistry.registerDoublesGauge(
UNDESIRED_ALLOCATION_RATIO_METRIC_NAME, UNDESIRED_ALLOCATION_RATIO_METRIC_NAME,
@ -115,6 +130,7 @@ public class DesiredBalanceMetrics {
"1", "1",
this::getUndesiredAllocationsRatioMetrics this::getUndesiredAllocationsRatioMetrics
); );
meterRegistry.registerDoublesGauge( meterRegistry.registerDoublesGauge(
DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME, DESIRED_BALANCE_NODE_WEIGHT_METRIC_NAME,
"Weight of nodes in the computed desired balance", "Weight of nodes in the computed desired balance",
@ -133,18 +149,19 @@ public class DesiredBalanceMetrics {
"bytes", "bytes",
this::getDesiredBalanceNodeDiskUsageMetrics this::getDesiredBalanceNodeDiskUsageMetrics
); );
meterRegistry.registerDoublesGauge(
CURRENT_NODE_WEIGHT_METRIC_NAME,
"The weight of nodes based on the current allocation state",
"unit",
this::getCurrentNodeWeightMetrics
);
meterRegistry.registerLongsGauge( meterRegistry.registerLongsGauge(
DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME, DESIRED_BALANCE_NODE_SHARD_COUNT_METRIC_NAME,
"Shard count of nodes in the computed desired balance", "Shard count of nodes in the computed desired balance",
"unit", "unit",
this::getDesiredBalanceNodeShardCountMetrics this::getDesiredBalanceNodeShardCountMetrics
); );
meterRegistry.registerDoublesGauge(
CURRENT_NODE_WEIGHT_METRIC_NAME,
"The weight of nodes based on the current allocation state",
"unit",
this::getCurrentNodeWeightMetrics
);
meterRegistry.registerDoublesGauge( meterRegistry.registerDoublesGauge(
CURRENT_NODE_WRITE_LOAD_METRIC_NAME, CURRENT_NODE_WRITE_LOAD_METRIC_NAME,
"The current write load of nodes", "The current write load of nodes",
@ -194,7 +211,7 @@ public class DesiredBalanceMetrics {
} }
public long undesiredAllocations() { public long undesiredAllocations() {
return undesiredAllocations; return undesiredAllocationsExcludingShuttingDownNodes;
} }
private List<LongWithAttributes> getUnassignedShardsMetrics() { private List<LongWithAttributes> getUnassignedShardsMetrics() {
@ -330,8 +347,8 @@ public class DesiredBalanceMetrics {
return getIfPublishing(totalAllocations); return getIfPublishing(totalAllocations);
} }
private List<LongWithAttributes> getUndesiredAllocationsMetrics() { private List<LongWithAttributes> getUndesiredAllocationsExcludingShuttingDownNodesMetrics() {
return getIfPublishing(undesiredAllocations); return getIfPublishing(undesiredAllocationsExcludingShuttingDownNodes);
} }
private List<LongWithAttributes> getIfPublishing(long value) { private List<LongWithAttributes> getIfPublishing(long value) {
@ -344,7 +361,7 @@ public class DesiredBalanceMetrics {
private List<DoubleWithAttributes> getUndesiredAllocationsRatioMetrics() { private List<DoubleWithAttributes> getUndesiredAllocationsRatioMetrics() {
if (nodeIsMaster) { if (nodeIsMaster) {
var total = totalAllocations; var total = totalAllocations;
var undesired = undesiredAllocations; var undesired = undesiredAllocationsExcludingShuttingDownNodes;
return List.of(new DoubleWithAttributes(total != 0 ? (double) undesired / total : 0.0)); return List.of(new DoubleWithAttributes(total != 0 ? (double) undesired / total : 0.0));
} }
return List.of(); return List.of();
@ -357,7 +374,7 @@ public class DesiredBalanceMetrics {
public void zeroAllMetrics() { public void zeroAllMetrics() {
unassignedShards = 0; unassignedShards = 0;
totalAllocations = 0; totalAllocations = 0;
undesiredAllocations = 0; undesiredAllocationsExcludingShuttingDownNodes = 0;
weightStatsPerNodeRef.set(Map.of()); weightStatsPerNodeRef.set(Map.of());
allocationStatsPerNodeRef.set(Map.of()); allocationStatsPerNodeRef.set(Map.of());
} }

View file

@ -21,10 +21,7 @@ import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus;
import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator;
import org.elasticsearch.cluster.routing.allocation.NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.allocator.DesiredBalanceMetrics.AllocationStats;
import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.ClusterSettings;
@ -37,9 +34,7 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.function.BiFunction; import java.util.function.BiFunction;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -83,16 +78,8 @@ public class DesiredBalanceReconciler {
private double undesiredAllocationsLogThreshold; private double undesiredAllocationsLogThreshold;
private final NodeAllocationOrdering allocationOrdering = new NodeAllocationOrdering(); private final NodeAllocationOrdering allocationOrdering = new NodeAllocationOrdering();
private final NodeAllocationOrdering moveOrdering = new NodeAllocationOrdering(); private final NodeAllocationOrdering moveOrdering = new NodeAllocationOrdering();
private final DesiredBalanceMetrics desiredBalanceMetrics;
private final NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator;
public DesiredBalanceReconciler( public DesiredBalanceReconciler(ClusterSettings clusterSettings, ThreadPool threadPool) {
ClusterSettings clusterSettings,
ThreadPool threadPool,
DesiredBalanceMetrics desiredBalanceMetrics,
NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator
) {
this.desiredBalanceMetrics = desiredBalanceMetrics;
this.undesiredAllocationLogInterval = new FrequencyCappedAction( this.undesiredAllocationLogInterval = new FrequencyCappedAction(
threadPool.relativeTimeInMillisSupplier(), threadPool.relativeTimeInMillisSupplier(),
TimeValue.timeValueMinutes(5) TimeValue.timeValueMinutes(5)
@ -102,7 +89,6 @@ public class DesiredBalanceReconciler {
UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING, UNDESIRED_ALLOCATIONS_LOG_THRESHOLD_SETTING,
value -> this.undesiredAllocationsLogThreshold = value value -> this.undesiredAllocationsLogThreshold = value
); );
this.nodeAllocationStatsAndWeightsCalculator = nodeAllocationStatsAndWeightsCalculator;
} }
/** /**
@ -111,12 +97,13 @@ public class DesiredBalanceReconciler {
* @param desiredBalance The new desired cluster shard allocation * @param desiredBalance The new desired cluster shard allocation
* @param allocation Cluster state information with which to make decisions, contains routing table metadata that will be modified to * @param allocation Cluster state information with which to make decisions, contains routing table metadata that will be modified to
* reach the given desired balance. * reach the given desired balance.
* @return {@link DesiredBalanceMetrics.AllocationStats} for this round of reconciliation changes.
*/ */
public void reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) { public DesiredBalanceMetrics.AllocationStats reconcile(DesiredBalance desiredBalance, RoutingAllocation allocation) {
var nodeIds = allocation.routingNodes().getAllNodeIds(); var nodeIds = allocation.routingNodes().getAllNodeIds();
allocationOrdering.retainNodes(nodeIds); allocationOrdering.retainNodes(nodeIds);
moveOrdering.retainNodes(nodeIds); moveOrdering.retainNodes(nodeIds);
new Reconciliation(desiredBalance, allocation).run(); return new Reconciliation(desiredBalance, allocation).run();
} }
public void clear() { public void clear() {
@ -124,6 +111,11 @@ public class DesiredBalanceReconciler {
moveOrdering.clear(); moveOrdering.clear();
} }
/**
* Handles updating the {@code RoutingNodes} to reflect the next steps towards the new {@code DesiredBalance}. Updates are limited by
* throttling (there are limits on the number of concurrent shard moves) or resource constraints (some shard moves might not be
* immediately possible until other shards move first).
*/
private class Reconciliation { private class Reconciliation {
private final DesiredBalance desiredBalance; private final DesiredBalance desiredBalance;
@ -136,7 +128,7 @@ public class DesiredBalanceReconciler {
this.routingNodes = allocation.routingNodes(); this.routingNodes = allocation.routingNodes();
} }
void run() { DesiredBalanceMetrics.AllocationStats run() {
try (var ignored = allocation.withReconcilingFlag()) { try (var ignored = allocation.withReconcilingFlag()) {
logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex()); logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex());
@ -145,13 +137,13 @@ public class DesiredBalanceReconciler {
// no data nodes, so fail allocation to report red health // no data nodes, so fail allocation to report red health
failAllocationOfNewPrimaries(allocation); failAllocationOfNewPrimaries(allocation);
logger.trace("no nodes available, nothing to reconcile"); logger.trace("no nodes available, nothing to reconcile");
return; return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS;
} }
if (desiredBalance.assignments().isEmpty()) { if (desiredBalance.assignments().isEmpty()) {
// no desired state yet but it is on its way and we'll reroute again when it is ready // no desired state yet but it is on its way and we'll reroute again when it is ready
logger.trace("desired balance is empty, nothing to reconcile"); logger.trace("desired balance is empty, nothing to reconcile");
return; return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS;
} }
// compute next moves towards current desired balance: // compute next moves towards current desired balance:
@ -164,38 +156,22 @@ public class DesiredBalanceReconciler {
// 2. move any shards that cannot remain where they are // 2. move any shards that cannot remain where they are
logger.trace("Reconciler#moveShards"); logger.trace("Reconciler#moveShards");
moveShards(); moveShards();
// 3. move any other shards that are desired elsewhere // 3. move any other shards that are desired elsewhere
// This is the rebalancing work. The previous calls were necessary, to assign unassigned shard copies, and move shards that
// violate resource thresholds. Now we run moves to improve the relative node resource loads.
logger.trace("Reconciler#balance"); logger.trace("Reconciler#balance");
var allocationStats = balance(); DesiredBalanceMetrics.AllocationStats allocationStats = balance();
logger.debug("Reconciliation is complete"); logger.debug("Reconciliation is complete");
return allocationStats;
updateDesireBalanceMetrics(allocationStats);
} }
} }
private void updateDesireBalanceMetrics(AllocationStats allocationStats) { /**
var nodesStatsAndWeights = nodeAllocationStatsAndWeightsCalculator.nodesAllocationStatsAndWeights( * Checks whether every shard is either assigned or ignored. Expected to be called after {@link #allocateUnassigned()}.
allocation.metadata(), */
allocation.routingNodes(),
allocation.clusterInfo(),
desiredBalance
);
Map<DiscoveryNode, NodeAllocationStatsAndWeight> filteredNodeAllocationStatsAndWeights = new HashMap<>(
nodesStatsAndWeights.size()
);
for (var nodeStatsAndWeight : nodesStatsAndWeights.entrySet()) {
var node = allocation.nodes().get(nodeStatsAndWeight.getKey());
if (node != null) {
filteredNodeAllocationStatsAndWeights.put(node, nodeStatsAndWeight.getValue());
}
}
desiredBalanceMetrics.updateMetrics(allocationStats, desiredBalance.weightsPerNode(), filteredNodeAllocationStatsAndWeights);
}
private boolean allocateUnassignedInvariant() { private boolean allocateUnassignedInvariant() {
// after allocateUnassigned, every shard must be either assigned or ignored
assert routingNodes.unassigned().isEmpty(); assert routingNodes.unassigned().isEmpty();
final var shardCounts = allocation.metadata() final var shardCounts = allocation.metadata()
@ -269,45 +245,55 @@ public class DesiredBalanceReconciler {
} }
/* /*
* Create some comparators to sort the unassigned shard copies in priority to allocate order.
* TODO: We could be smarter here and group the shards by index and then * TODO: We could be smarter here and group the shards by index and then
* use the sorter to save some iterations. * use the sorter to save some iterations.
*/ */
final PriorityComparator secondaryComparator = PriorityComparator.getAllocationComparator(allocation); final PriorityComparator indexPriorityComparator = PriorityComparator.getAllocationComparator(allocation);
final Comparator<ShardRouting> comparator = (o1, o2) -> { final Comparator<ShardRouting> shardAllocationPriorityComparator = (o1, o2) -> {
// Prioritize assigning a primary shard copy, if one is a primary and the other is not.
if (o1.primary() ^ o2.primary()) { if (o1.primary() ^ o2.primary()) {
return o1.primary() ? -1 : 1; return o1.primary() ? -1 : 1;
} }
// Then order shards in the same index arbitrarily by shard ID.
if (o1.getIndexName().compareTo(o2.getIndexName()) == 0) { if (o1.getIndexName().compareTo(o2.getIndexName()) == 0) {
return o1.getId() - o2.getId(); return o1.getId() - o2.getId();
} }
// Lastly, prioritize system indices, then use index priority of non-system indices, then by age, etc.
//
// this comparator is more expensive than all the others up there // this comparator is more expensive than all the others up there
// that's why it's added last even though it could be easier to read // that's why it's added last even though it could be easier to read
// if we'd apply it earlier. this comparator will only differentiate across // if we'd apply it earlier. this comparator will only differentiate across
// indices all shards of the same index is treated equally. // indices all shards of the same index is treated equally.
final int secondary = secondaryComparator.compare(o1, o2); final int secondaryComparison = indexPriorityComparator.compare(o1, o2);
assert secondary != 0 : "Index names are equal, should be returned early."; assert secondaryComparison != 0 : "Index names are equal, should be returned early.";
return secondary; return secondaryComparison;
}; };
/* /*
* we use 2 arrays and move replicas to the second array once we allocated an identical * we use 2 arrays and move replicas to the second array once we allocated an identical
* replica in the current iteration to make sure all indices get allocated in the same manner. * replica in the current iteration to make sure all indices get allocated in the same manner.
* The arrays are sorted by primaries first and then by index and shard ID so a 2 indices with * The arrays are sorted by primaries first and then by index and shard ID so 2 indices with
* 2 replica and 1 shard would look like: * 2 replica and 1 shard would look like:
* [(0,P,IDX1), (0,P,IDX2), (0,R,IDX1), (0,R,IDX1), (0,R,IDX2), (0,R,IDX2)] * [(0,P,IDX1), (0,P,IDX2), (0,R,IDX1), (0,R,IDX1), (0,R,IDX2), (0,R,IDX2)]
* if we allocate for instance (0, R, IDX1) we move the second replica to the secondary array and proceed with * if we allocate for instance (0, R, IDX1) we move the second replica to the secondary array and proceed with
* the next replica. If we could not find a node to allocate (0,R,IDX1) we move all it's replicas to ignoreUnassigned. * the next replica. If we could not find a node to allocate (0,R,IDX1) we move all it's replicas to ignoreUnassigned.
*/ */
ShardRouting[] primary = unassigned.drain(); ShardRouting[] orderedShardAllocationList = unassigned.drain();
ShardRouting[] secondary = new ShardRouting[primary.length]; ShardRouting[] deferredShardAllocationList = new ShardRouting[orderedShardAllocationList.length];
int secondaryLength = 0; int deferredShardAllocationListLength = 0;
int primaryLength = primary.length; int orderedShardAllocationListLength = orderedShardAllocationList.length;
ArrayUtil.timSort(primary, comparator); ArrayUtil.timSort(orderedShardAllocationList, shardAllocationPriorityComparator);
do { do {
nextShard: for (int i = 0; i < primaryLength; i++) { nextShard: for (int i = 0; i < orderedShardAllocationListLength; i++) {
final var shard = primary[i]; final var shard = orderedShardAllocationList[i];
final var assignment = desiredBalance.getAssignment(shard.shardId()); final var assignment = desiredBalance.getAssignment(shard.shardId());
// An ignored shard copy is one that has no desired balance assignment.
final boolean ignored = assignment == null || isIgnored(routingNodes, shard, assignment); final boolean ignored = assignment == null || isIgnored(routingNodes, shard, assignment);
AllocationStatus unallocatedStatus; AllocationStatus unallocatedStatus;
if (ignored) { if (ignored) {
unallocatedStatus = AllocationStatus.NO_ATTEMPT; unallocatedStatus = AllocationStatus.NO_ATTEMPT;
@ -337,8 +323,13 @@ public class DesiredBalanceReconciler {
if (shard.primary() == false) { if (shard.primary() == false) {
// copy over the same replica shards to the secondary array so they will get allocated // copy over the same replica shards to the secondary array so they will get allocated
// in a subsequent iteration, allowing replicas of other shards to be allocated first // in a subsequent iteration, allowing replicas of other shards to be allocated first
while (i < primaryLength - 1 && comparator.compare(primary[i], primary[i + 1]) == 0) { while (i < orderedShardAllocationListLength - 1
secondary[secondaryLength++] = primary[++i]; && shardAllocationPriorityComparator.compare(
orderedShardAllocationList[i],
orderedShardAllocationList[i + 1]
) == 0) {
deferredShardAllocationList[deferredShardAllocationListLength++] =
orderedShardAllocationList[++i];
} }
} }
continue nextShard; continue nextShard;
@ -358,18 +349,23 @@ public class DesiredBalanceReconciler {
logger.debug("No eligible node found to assign shard [{}]", shard); logger.debug("No eligible node found to assign shard [{}]", shard);
unassigned.ignoreShard(shard, unallocatedStatus, allocation.changes()); unassigned.ignoreShard(shard, unallocatedStatus, allocation.changes());
if (shard.primary() == false) { if (shard.primary() == false) {
// we could not allocate it and we are a replica - check if we can ignore the other replicas // We could not allocate the shard copy and the copy is a replica: check if we can ignore the other unassigned
while (i < primaryLength - 1 && comparator.compare(primary[i], primary[i + 1]) == 0) { // replicas.
unassigned.ignoreShard(primary[++i], unallocatedStatus, allocation.changes()); while (i < orderedShardAllocationListLength - 1
&& shardAllocationPriorityComparator.compare(
orderedShardAllocationList[i],
orderedShardAllocationList[i + 1]
) == 0) {
unassigned.ignoreShard(orderedShardAllocationList[++i], unallocatedStatus, allocation.changes());
} }
} }
} }
primaryLength = secondaryLength; ShardRouting[] tmp = orderedShardAllocationList;
ShardRouting[] tmp = primary; orderedShardAllocationList = deferredShardAllocationList;
primary = secondary; deferredShardAllocationList = tmp;
secondary = tmp; orderedShardAllocationListLength = deferredShardAllocationListLength;
secondaryLength = 0; deferredShardAllocationListLength = 0;
} while (primaryLength > 0); } while (orderedShardAllocationListLength > 0);
} }
private final class NodeIdsIterator implements Iterator<String> { private final class NodeIdsIterator implements Iterator<String> {
@ -377,11 +373,7 @@ public class DesiredBalanceReconciler {
private final ShardRouting shard; private final ShardRouting shard;
private final RoutingNodes routingNodes; private final RoutingNodes routingNodes;
/** /**
* Contains the source of the nodeIds used for shard assignment. It could be: * Contains the source of the nodeIds used for shard assignment.
* * desired - when using desired nodes
* * forced initial allocation - when initial allocation is forced to certain nodes by shrink/split/clone index operation
* * fallback - when assigning the primary shard is temporarily not possible on desired nodes,
* and it is assigned elsewhere in the cluster
*/ */
private NodeIdSource source; private NodeIdSource source;
private Iterator<String> nodeIds; private Iterator<String> nodeIds;
@ -437,11 +429,21 @@ public class DesiredBalanceReconciler {
} }
private enum NodeIdSource { private enum NodeIdSource {
// Using desired nodes.
DESIRED, DESIRED,
// Initial allocation is forced to certain nodes by shrink/split/clone index operation.
FORCED_INITIAL_ALLOCATION, FORCED_INITIAL_ALLOCATION,
// Assigning the primary shard is temporarily not possible on desired nodes, and it is assigned elsewhere in the cluster.
FALLBACK; FALLBACK;
} }
/**
* Checks whether the {@code shard} copy has been assigned to a node or not in {@code assignment}.
* @param routingNodes The current routing information
* @param shard A particular shard copy
* @param assignment The assignments for shard primary and replica copies
* @return Whether the shard has a node assignment.
*/
private boolean isIgnored(RoutingNodes routingNodes, ShardRouting shard, ShardAssignment assignment) { private boolean isIgnored(RoutingNodes routingNodes, ShardRouting shard, ShardAssignment assignment) {
if (assignment.ignored() == 0) { if (assignment.ignored() == 0) {
// no shards are ignored // no shards are ignored
@ -518,7 +520,8 @@ public class DesiredBalanceReconciler {
} }
} }
private AllocationStats balance() { private DesiredBalanceMetrics.AllocationStats balance() {
// Check if rebalancing is disabled.
if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) { if (allocation.deciders().canRebalance(allocation).type() != Decision.Type.YES) {
return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS; return DesiredBalanceMetrics.EMPTY_ALLOCATION_STATS;
} }
@ -587,8 +590,11 @@ public class DesiredBalanceReconciler {
} }
maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocationsExcludingShuttingDownNodes, routingNodes.size()); maybeLogUndesiredAllocationsWarning(totalAllocations, undesiredAllocationsExcludingShuttingDownNodes, routingNodes.size());
return new DesiredBalanceMetrics.AllocationStats(
return new AllocationStats(unassignedShards, totalAllocations, undesiredAllocationsExcludingShuttingDownNodes); unassignedShards,
totalAllocations,
undesiredAllocationsExcludingShuttingDownNodes
);
} }
private void maybeLogUndesiredAllocationsWarning(int totalAllocations, int undesiredAllocations, int nodeCount) { private void maybeLogUndesiredAllocationsWarning(int totalAllocations, int undesiredAllocations, int nodeCount) {

View file

@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.AllocationService.RerouteStrategy; import org.elasticsearch.cluster.routing.allocation.AllocationService.RerouteStrategy;
@ -39,6 +40,7 @@ import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -87,6 +89,7 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator {
private final AtomicReference<DesiredBalance> currentDesiredBalanceRef = new AtomicReference<>(DesiredBalance.NOT_MASTER); private final AtomicReference<DesiredBalance> currentDesiredBalanceRef = new AtomicReference<>(DesiredBalance.NOT_MASTER);
private volatile boolean resetCurrentDesiredBalance = false; private volatile boolean resetCurrentDesiredBalance = false;
private final Set<String> processedNodeShutdowns = new HashSet<>(); private final Set<String> processedNodeShutdowns = new HashSet<>();
private final NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator;
private final DesiredBalanceMetrics desiredBalanceMetrics; private final DesiredBalanceMetrics desiredBalanceMetrics;
/** /**
* Manages balancer round results in order to report on the balancer activity in a configurable manner. * Manages balancer round results in order to report on the balancer activity in a configurable manner.
@ -136,17 +139,13 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator {
NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator NodeAllocationStatsAndWeightsCalculator nodeAllocationStatsAndWeightsCalculator
) { ) {
this.desiredBalanceMetrics = new DesiredBalanceMetrics(telemetryProvider.getMeterRegistry()); this.desiredBalanceMetrics = new DesiredBalanceMetrics(telemetryProvider.getMeterRegistry());
this.nodeAllocationStatsAndWeightsCalculator = nodeAllocationStatsAndWeightsCalculator;
this.balancerRoundSummaryService = new AllocationBalancingRoundSummaryService(threadPool, clusterService.getClusterSettings()); this.balancerRoundSummaryService = new AllocationBalancingRoundSummaryService(threadPool, clusterService.getClusterSettings());
this.delegateAllocator = delegateAllocator; this.delegateAllocator = delegateAllocator;
this.threadPool = threadPool; this.threadPool = threadPool;
this.reconciler = reconciler; this.reconciler = reconciler;
this.desiredBalanceComputer = desiredBalanceComputer; this.desiredBalanceComputer = desiredBalanceComputer;
this.desiredBalanceReconciler = new DesiredBalanceReconciler( this.desiredBalanceReconciler = new DesiredBalanceReconciler(clusterService.getClusterSettings(), threadPool);
clusterService.getClusterSettings(),
threadPool,
desiredBalanceMetrics,
nodeAllocationStatsAndWeightsCalculator
);
this.desiredBalanceComputation = new ContinuousComputation<>(threadPool.generic()) { this.desiredBalanceComputation = new ContinuousComputation<>(threadPool.generic()) {
@Override @Override
@ -347,6 +346,10 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator {
return new BalancingRoundSummary(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance)); return new BalancingRoundSummary(DesiredBalance.shardMovements(oldDesiredBalance, newDesiredBalance));
} }
/**
* Submits the desired balance to be reconciled (applies the desired changes to the routing table) and creates and publishes a new
* cluster state. The data nodes will receive and apply the new cluster state to start/move/remove shards.
*/
protected void submitReconcileTask(DesiredBalance desiredBalance) { protected void submitReconcileTask(DesiredBalance desiredBalance) {
masterServiceTaskQueue.submitTask("reconcile-desired-balance", new ReconcileDesiredBalanceTask(desiredBalance), null); masterServiceTaskQueue.submitTask("reconcile-desired-balance", new ReconcileDesiredBalanceTask(desiredBalance), null);
} }
@ -357,7 +360,11 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator {
} else { } else {
logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex()); logger.debug("Reconciling desired balance for [{}]", desiredBalance.lastConvergedIndex());
} }
recordTime(cumulativeReconciliationTime, () -> desiredBalanceReconciler.reconcile(desiredBalance, allocation)); recordTime(cumulativeReconciliationTime, () -> {
DesiredBalanceMetrics.AllocationStats allocationStats = desiredBalanceReconciler.reconcile(desiredBalance, allocation);
updateDesireBalanceMetrics(desiredBalance, allocation, allocationStats);
});
if (logger.isTraceEnabled()) { if (logger.isTraceEnabled()) {
logger.trace("Reconciled desired balance: {}", desiredBalance); logger.trace("Reconciled desired balance: {}", desiredBalance);
} else { } else {
@ -391,6 +398,28 @@ public class DesiredBalanceShardsAllocator implements ShardsAllocator {
resetCurrentDesiredBalance = true; resetCurrentDesiredBalance = true;
} }
private void updateDesireBalanceMetrics(
DesiredBalance desiredBalance,
RoutingAllocation routingAllocation,
DesiredBalanceMetrics.AllocationStats allocationStats
) {
var nodesStatsAndWeights = nodeAllocationStatsAndWeightsCalculator.nodesAllocationStatsAndWeights(
routingAllocation.metadata(),
routingAllocation.routingNodes(),
routingAllocation.clusterInfo(),
desiredBalance
);
Map<DiscoveryNode, NodeAllocationStatsAndWeightsCalculator.NodeAllocationStatsAndWeight> filteredNodeAllocationStatsAndWeights =
new HashMap<>(nodesStatsAndWeights.size());
for (var nodeStatsAndWeight : nodesStatsAndWeights.entrySet()) {
var node = routingAllocation.nodes().get(nodeStatsAndWeight.getKey());
if (node != null) {
filteredNodeAllocationStatsAndWeights.put(node, nodeStatsAndWeight.getValue());
}
}
desiredBalanceMetrics.updateMetrics(allocationStats, desiredBalance.weightsPerNode(), filteredNodeAllocationStatsAndWeights);
}
public DesiredBalanceStats getStats() { public DesiredBalanceStats getStats() {
return new DesiredBalanceStats( return new DesiredBalanceStats(
Math.max(currentDesiredBalanceRef.get().lastConvergedIndex(), 0L), Math.max(currentDesiredBalanceRef.get().lastConvergedIndex(), 0L),

View file

@ -33,10 +33,28 @@ public class OrderedShardsIterator implements Iterator<ShardRouting> {
private final ArrayDeque<NodeAndShardIterator> queue; private final ArrayDeque<NodeAndShardIterator> queue;
/**
* This iterator will progress through the shards node by node, each node's shards ordered from most write active to least.
*
* @param allocation
* @param ordering
* @return An iterator over all shards in the {@link RoutingNodes} held by {@code allocation} (all shards assigned to a node). The
* iterator will progress node by node, where each node's shards are ordered from data stream write indices, to regular indices and
* lastly to data stream read indices.
*/
public static OrderedShardsIterator createForNecessaryMoves(RoutingAllocation allocation, NodeAllocationOrdering ordering) { public static OrderedShardsIterator createForNecessaryMoves(RoutingAllocation allocation, NodeAllocationOrdering ordering) {
return create(allocation.routingNodes(), createShardsComparator(allocation), ordering); return create(allocation.routingNodes(), createShardsComparator(allocation), ordering);
} }
/**
* This iterator will progress through the shards node by node, each node's shards ordered from least write active to most.
*
* @param allocation
* @param ordering
* @return An iterator over all shards in the {@link RoutingNodes} held by {@code allocation} (all shards assigned to a node). The
* iterator will progress node by node, where each node's shards are ordered from data stream read indices, to regular indices and
* lastly to data stream write indices.
*/
public static OrderedShardsIterator createForBalancing(RoutingAllocation allocation, NodeAllocationOrdering ordering) { public static OrderedShardsIterator createForBalancing(RoutingAllocation allocation, NodeAllocationOrdering ordering) {
return create(allocation.routingNodes(), createShardsComparator(allocation).reversed(), ordering); return create(allocation.routingNodes(), createShardsComparator(allocation).reversed(), ordering);
} }
@ -61,6 +79,9 @@ public class OrderedShardsIterator implements Iterator<ShardRouting> {
return Iterators.forArray(shards); return Iterators.forArray(shards);
} }
/**
* Prioritizes write indices of data streams, and deprioritizes data stream read indices, relative to regular indices.
*/
private static Comparator<ShardRouting> createShardsComparator(RoutingAllocation allocation) { private static Comparator<ShardRouting> createShardsComparator(RoutingAllocation allocation) {
return Comparator.comparing(shard -> { return Comparator.comparing(shard -> {
final ProjectMetadata project = allocation.metadata().projectFor(shard.index()); final ProjectMetadata project = allocation.metadata().projectFor(shard.index());

View file

@ -20,6 +20,8 @@ import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.ConcurrentMergeScheduler; import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterCodecReader;
import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FilterLeafReader;
@ -190,7 +192,18 @@ public class Lucene {
throw new IllegalStateException("no commit found in the directory"); throw new IllegalStateException("no commit found in the directory");
} }
} }
// Need to figure out what the parent field is that, so that validation in IndexWriter doesn't fail
// if no parent field is configured, but FieldInfo says there is a parent field.
String parentField = null;
final IndexCommit cp = getIndexCommit(si, directory); final IndexCommit cp = getIndexCommit(si, directory);
try (var reader = DirectoryReader.open(cp)) {
var topLevelFieldInfos = FieldInfos.getMergedFieldInfos(reader);
for (FieldInfo fieldInfo : topLevelFieldInfos) {
if (fieldInfo.isParentField()) {
parentField = fieldInfo.getName();
}
}
}
try ( try (
IndexWriter writer = new IndexWriter( IndexWriter writer = new IndexWriter(
directory, directory,
@ -198,6 +211,7 @@ public class Lucene {
.setIndexCommit(cp) .setIndexCommit(cp)
.setCommitOnClose(false) .setCommitOnClose(false)
.setOpenMode(IndexWriterConfig.OpenMode.APPEND) .setOpenMode(IndexWriterConfig.OpenMode.APPEND)
.setParentField(parentField)
) )
) { ) {
// do nothing and close this will kick off IndexFileDeleter which will remove all pending files // do nothing and close this will kick off IndexFileDeleter which will remove all pending files

View file

@ -30,18 +30,29 @@ public class SizeLimitingStringWriter extends StringWriter {
this.sizeLimit = sizeLimit; this.sizeLimit = sizeLimit;
} }
private void checkSizeLimit(int additionalChars) { private int limitSize(int additionalChars) {
int bufLen = getBuffer().length(); int neededSize = getBuffer().length() + additionalChars;
if (bufLen + additionalChars > sizeLimit) { if (neededSize > sizeLimit) {
throw new SizeLimitExceededException( return additionalChars - (neededSize - sizeLimit);
Strings.format("String [%s...] has exceeded the size limit [%s]", getBuffer().substring(0, Math.min(bufLen, 20)), sizeLimit)
);
} }
return additionalChars;
}
private void throwSizeLimitExceeded(int limitedChars, int requestedChars) {
assert limitedChars < requestedChars;
int bufLen = getBuffer().length();
int foundSize = bufLen - limitedChars + requestedChars; // reconstitute original
String selection = getBuffer().substring(0, Math.min(bufLen, 20));
throw new SizeLimitExceededException(
Strings.format("String [%s...] has size [%d] which exceeds the size limit [%d]", selection, foundSize, sizeLimit)
);
} }
@Override @Override
public void write(int c) { public void write(int c) {
checkSizeLimit(1); if (limitSize(1) != 1) {
throwSizeLimitExceeded(0, 1);
}
super.write(c); super.write(c);
} }
@ -49,20 +60,29 @@ public class SizeLimitingStringWriter extends StringWriter {
@Override @Override
public void write(char[] cbuf, int off, int len) { public void write(char[] cbuf, int off, int len) {
checkSizeLimit(len); int limitedLen = limitSize(len);
super.write(cbuf, off, len); if (limitedLen > 0) {
super.write(cbuf, off, limitedLen);
}
if (limitedLen != len) {
throwSizeLimitExceeded(limitedLen, len);
}
} }
@Override @Override
public void write(String str) { public void write(String str) {
checkSizeLimit(str.length()); this.write(str, 0, str.length());
super.write(str);
} }
@Override @Override
public void write(String str, int off, int len) { public void write(String str, int off, int len) {
checkSizeLimit(len); int limitedLen = limitSize(len);
super.write(str, off, len); if (limitedLen > 0) {
super.write(str, off, limitedLen);
}
if (limitedLen != len) {
throwSizeLimitExceeded(limitedLen, len);
}
} }
// append(...) delegates to write(...) methods // append(...) delegates to write(...) methods

View file

@ -13,7 +13,7 @@ import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
public class PlainIterator<T> implements Iterable<T>, Countable { public class PlainIterator<T> implements Iterable<T> {
private final List<T> elements; private final List<T> elements;
// Calls to nextOrNull might be performed on different threads in the transport actions so we need the volatile // Calls to nextOrNull might be performed on different threads in the transport actions so we need the volatile
@ -43,7 +43,6 @@ public class PlainIterator<T> implements Iterable<T>, Countable {
} }
} }
@Override
public int size() { public int size() {
return elements.size(); return elements.size();
} }

View file

@ -29,7 +29,6 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.NestedLookup;
import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.RoutingFields; import org.elasticsearch.index.mapper.RoutingFields;
@ -156,9 +155,6 @@ public enum IndexMode {
@Override @Override
public void validateMapping(MappingLookup lookup) { public void validateMapping(MappingLookup lookup) {
if (lookup.nestedLookup() != NestedLookup.EMPTY) {
throw new IllegalArgumentException("cannot have nested fields when index is in " + tsdbMode());
}
if (((RoutingFieldMapper) lookup.getMapper(RoutingFieldMapper.NAME)).required()) { if (((RoutingFieldMapper) lookup.getMapper(RoutingFieldMapper.NAME)).required()) {
throw new IllegalArgumentException(routingRequiredBad()); throw new IllegalArgumentException(routingRequiredBad());
} }

View file

@ -232,7 +232,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
mapperMetrics mapperMetrics
); );
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, circuitBreakerService);
if (indexSettings.getIndexSortConfig().hasIndexSort()) { boolean sourceOnly = Boolean.parseBoolean(indexSettings.getSettings().get("index.source_only"));
if (indexSettings.getIndexSortConfig().hasIndexSort() && sourceOnly == false) {
// we delay the actual creation of the sort order for this index because the mapping has not been merged yet. // we delay the actual creation of the sort order for this index because the mapping has not been merged yet.
// The sort order is validated right after the merge of the mapping later in the process. // The sort order is validated right after the merge of the mapping later in the process.
this.indexSortSupplier = () -> indexSettings.getIndexSortConfig() this.indexSortSupplier = () -> indexSettings.getIndexSortConfig()

View file

@ -22,6 +22,7 @@ import org.elasticsearch.search.fetch.StoredFieldsSpec;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
* Loads values from {@code _source}. This whole process is very slow and cast-tastic, * Loads values from {@code _source}. This whole process is very slow and cast-tastic,
@ -230,7 +231,7 @@ public abstract class BlockSourceReader implements BlockLoader.RowStrideReader {
@Override @Override
protected void append(BlockLoader.Builder builder, Object v) { protected void append(BlockLoader.Builder builder, Object v) {
((BlockLoader.BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, (String) v)); ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, Objects.toString(v)));
} }
@Override @Override

View file

@ -665,8 +665,14 @@ public abstract class DocumentParserContext {
if (idField != null) { if (idField != null) {
// We just need to store the id as indexed field, so that IndexWriter#deleteDocuments(term) can then // We just need to store the id as indexed field, so that IndexWriter#deleteDocuments(term) can then
// delete it when the root document is deleted too. // delete it when the root document is deleted too.
// NOTE: we don't support nested fields in tsdb so it's safe to assume the standard id mapper.
doc.add(new StringField(IdFieldMapper.NAME, idField.binaryValue(), Field.Store.NO)); doc.add(new StringField(IdFieldMapper.NAME, idField.binaryValue(), Field.Store.NO));
} else if (indexSettings().getMode() == IndexMode.TIME_SERIES) {
// For time series indices, the _id is generated from the _tsid, which in turn is generated from the values of the configured
// routing fields. At this point in document parsing, we can't guarantee that we've parsed all the routing fields yet, so the
// parent document's _id is not yet available.
// So we just add the child document without the parent _id, then in TimeSeriesIdFieldMapper#postParse we set the _id on all
// child documents once we've calculated it.
assert getRoutingFields().equals(RoutingFields.Noop.INSTANCE) == false;
} else { } else {
throw new IllegalStateException("The root document of a nested document should have an _id field"); throw new IllegalStateException("The root document of a nested document should have an _id field");
} }

View file

@ -23,6 +23,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.Stack;
/** /**
* Block loader for fields that use fallback synthetic source implementation. * Block loader for fields that use fallback synthetic source implementation.
@ -191,18 +192,45 @@ public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader
.createParser(filterParserConfig, nameValue.value().bytes, nameValue.value().offset + 1, nameValue.value().length - 1) .createParser(filterParserConfig, nameValue.value().bytes, nameValue.value().offset + 1, nameValue.value().length - 1)
) { ) {
parser.nextToken(); parser.nextToken();
var fieldNameInParser = new StringBuilder(nameValue.name()); var fieldNames = new Stack<String>() {
while (true) { {
if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { push(nameValue.name());
fieldNameInParser.append('.').append(parser.currentName());
if (fieldNameInParser.toString().equals(fieldName)) {
parser.nextToken();
break;
}
} }
};
while (parser.currentToken() != null) {
// We are descending into an object/array hierarchy of arbitrary depth
// until we find the field that we need.
while (true) {
if (parser.currentToken() == XContentParser.Token.FIELD_NAME) {
fieldNames.push(parser.currentName());
var nameInParser = String.join(".", fieldNames);
if (nameInParser.equals(fieldName)) {
parser.nextToken();
break;
}
} else {
assert parser.currentToken() == XContentParser.Token.START_OBJECT
|| parser.currentToken() == XContentParser.Token.START_ARRAY;
}
parser.nextToken();
}
parseWithReader(parser, blockValues);
parser.nextToken(); parser.nextToken();
// We are coming back up in object/array hierarchy.
// If arrays are present we will explore all array items by going back down again.
while (parser.currentToken() == XContentParser.Token.END_OBJECT
|| parser.currentToken() == XContentParser.Token.END_ARRAY) {
// When exiting an object arrays we'll see END_OBJECT followed by END_ARRAY, but we only need to pop the object name
// once.
if (parser.currentToken() == XContentParser.Token.END_OBJECT) {
fieldNames.pop();
}
parser.nextToken();
}
} }
parseWithReader(parser, blockValues);
} }
} }

View file

@ -27,6 +27,7 @@ public class MapperFeatures implements FeatureSpecification {
"mapper.counted_keyword.synthetic_source_native_support" "mapper.counted_keyword.synthetic_source_native_support"
); );
public static final NodeFeature TSDB_NESTED_FIELD_SUPPORT = new NodeFeature("mapper.tsdb_nested_field_support");
public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed");
public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support"); public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support");
public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix"); public static final NodeFeature SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX = new NodeFeature("mapper.nested.sorting_fields_check_fix");
@ -49,6 +50,7 @@ public class MapperFeatures implements FeatureSpecification {
COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT, COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT,
SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX, SORT_FIELDS_CHECK_FOR_NESTED_OBJECT_FIX,
DYNAMIC_HANDLING_IN_COPY_TO, DYNAMIC_HANDLING_IN_COPY_TO,
TSDB_NESTED_FIELD_SUPPORT,
SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE, SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE,
ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX
); );

View file

@ -269,7 +269,7 @@ public class NumberFieldMapper extends FieldMapper {
dimension.setValue(true); dimension.setValue(true);
} }
MappedFieldType ft = new NumberFieldType(context.buildFullName(leafName()), this); MappedFieldType ft = new NumberFieldType(context.buildFullName(leafName()), this, context.isSourceSynthetic());
hasScript = script.get() != null; hasScript = script.get() != null;
onScriptError = onScriptErrorParam.getValue(); onScriptError = onScriptErrorParam.getValue();
return new NumberFieldMapper(leafName(), ft, builderParams(this, context), context.isSourceSynthetic(), this); return new NumberFieldMapper(leafName(), ft, builderParams(this, context), context.isSourceSynthetic(), this);
@ -463,6 +463,11 @@ public class NumberFieldMapper extends FieldMapper {
BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
}, },
FLOAT("float", NumericType.FLOAT) { FLOAT("float", NumericType.FLOAT) {
@Override @Override
@ -647,6 +652,11 @@ public class NumberFieldMapper extends FieldMapper {
BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
}, },
DOUBLE("double", NumericType.DOUBLE) { DOUBLE("double", NumericType.DOUBLE) {
@Override @Override
@ -797,6 +807,11 @@ public class NumberFieldMapper extends FieldMapper {
BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.DoublesBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return floatingPointBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
}, },
BYTE("byte", NumericType.BYTE) { BYTE("byte", NumericType.BYTE) {
@Override @Override
@ -911,6 +926,11 @@ public class NumberFieldMapper extends FieldMapper {
return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
private boolean isOutOfRange(Object value) { private boolean isOutOfRange(Object value) {
double doubleValue = objectToDouble(value); double doubleValue = objectToDouble(value);
return doubleValue < Byte.MIN_VALUE || doubleValue > Byte.MAX_VALUE; return doubleValue < Byte.MIN_VALUE || doubleValue > Byte.MAX_VALUE;
@ -1024,6 +1044,11 @@ public class NumberFieldMapper extends FieldMapper {
return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
private boolean isOutOfRange(Object value) { private boolean isOutOfRange(Object value) {
double doubleValue = objectToDouble(value); double doubleValue = objectToDouble(value);
return doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE; return doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE;
@ -1210,6 +1235,11 @@ public class NumberFieldMapper extends FieldMapper {
BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) { BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup) {
return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.IntsBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
return integerBlockLoaderFromFallbackSyntheticSource(this, fieldName, nullValue, coerce);
}
}, },
LONG("long", NumericType.LONG) { LONG("long", NumericType.LONG) {
@Override @Override
@ -1358,6 +1388,26 @@ public class NumberFieldMapper extends FieldMapper {
return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup); return new BlockSourceReader.LongsBlockLoader(sourceValueFetcher, lookup);
} }
@Override
BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce) {
var reader = new NumberFallbackSyntheticSourceReader(this, nullValue, coerce) {
@Override
public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
var builder = (BlockLoader.LongBuilder) blockBuilder;
for (var value : values) {
builder.appendLong(value.longValue());
}
}
};
return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.longs(expectedCount);
}
};
}
private boolean isOutOfRange(Object value) { private boolean isOutOfRange(Object value) {
if (value instanceof Long) { if (value instanceof Long) {
return false; return false;
@ -1626,6 +1676,106 @@ public class NumberFieldMapper extends FieldMapper {
abstract BlockLoader blockLoaderFromDocValues(String fieldName); abstract BlockLoader blockLoaderFromDocValues(String fieldName);
abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup); abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher, BlockSourceReader.LeafIteratorLookup lookup);
abstract BlockLoader blockLoaderFromFallbackSyntheticSource(String fieldName, Number nullValue, boolean coerce);
// All values that fit into integer are returned as integers
private static BlockLoader integerBlockLoaderFromFallbackSyntheticSource(
NumberType type,
String fieldName,
Number nullValue,
boolean coerce
) {
var reader = new NumberFallbackSyntheticSourceReader(type, nullValue, coerce) {
@Override
public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
var builder = (BlockLoader.IntBuilder) blockBuilder;
for (var value : values) {
builder.appendInt(value.intValue());
}
}
};
return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.ints(expectedCount);
}
};
}
// All floating point values are returned as doubles
private static BlockLoader floatingPointBlockLoaderFromFallbackSyntheticSource(
NumberType type,
String fieldName,
Number nullValue,
boolean coerce
) {
var reader = new NumberFallbackSyntheticSourceReader(type, nullValue, coerce) {
@Override
public void writeToBlock(List<Number> values, BlockLoader.Builder blockBuilder) {
var builder = (BlockLoader.DoubleBuilder) blockBuilder;
for (var value : values) {
builder.appendDouble(value.doubleValue());
}
}
};
return new FallbackSyntheticSourceBlockLoader(reader, fieldName) {
@Override
public Builder builder(BlockFactory factory, int expectedCount) {
return factory.doubles(expectedCount);
}
};
}
abstract static class NumberFallbackSyntheticSourceReader extends FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<
Number> {
private final NumberType type;
private final Number nullValue;
private final boolean coerce;
NumberFallbackSyntheticSourceReader(NumberType type, Number nullValue, boolean coerce) {
super(nullValue);
this.type = type;
this.nullValue = nullValue;
this.coerce = coerce;
}
@Override
public void convertValue(Object value, List<Number> accumulator) {
if (coerce && value.equals("")) {
if (nullValue != null) {
accumulator.add(nullValue);
}
}
try {
var converted = type.parse(value, coerce);
accumulator.add(converted);
} catch (Exception e) {
// Malformed value, skip it
}
}
@Override
public void parseNonNullValue(XContentParser parser, List<Number> accumulator) throws IOException {
// Aligned with implementation of `value(XContentParser)`
if (coerce && parser.currentToken() == Token.VALUE_STRING && parser.textLength() == 0) {
if (nullValue != null) {
accumulator.add(nullValue);
}
}
try {
Number rawValue = type.parse(parser, coerce);
// Transform number to correct type (e.g. reduce precision)
accumulator.add(type.parse(rawValue, coerce));
} catch (Exception e) {
// Malformed value, skip it
}
}
};
} }
public static class NumberFieldType extends SimpleMappedFieldType { public static class NumberFieldType extends SimpleMappedFieldType {
@ -1637,6 +1787,7 @@ public class NumberFieldMapper extends FieldMapper {
private final boolean isDimension; private final boolean isDimension;
private final MetricType metricType; private final MetricType metricType;
private final IndexMode indexMode; private final IndexMode indexMode;
private final boolean isSyntheticSource;
public NumberFieldType( public NumberFieldType(
String name, String name,
@ -1650,7 +1801,8 @@ public class NumberFieldMapper extends FieldMapper {
FieldValues<Number> script, FieldValues<Number> script,
boolean isDimension, boolean isDimension,
MetricType metricType, MetricType metricType,
IndexMode indexMode IndexMode indexMode,
boolean isSyntheticSource
) { ) {
super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); super(name, isIndexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta);
this.type = Objects.requireNonNull(type); this.type = Objects.requireNonNull(type);
@ -1660,9 +1812,10 @@ public class NumberFieldMapper extends FieldMapper {
this.isDimension = isDimension; this.isDimension = isDimension;
this.metricType = metricType; this.metricType = metricType;
this.indexMode = indexMode; this.indexMode = indexMode;
this.isSyntheticSource = isSyntheticSource;
} }
NumberFieldType(String name, Builder builder) { NumberFieldType(String name, Builder builder, boolean isSyntheticSource) {
this( this(
name, name,
builder.type, builder.type,
@ -1675,7 +1828,8 @@ public class NumberFieldMapper extends FieldMapper {
builder.scriptValues(), builder.scriptValues(),
builder.dimension.getValue(), builder.dimension.getValue(),
builder.metric.getValue(), builder.metric.getValue(),
builder.indexMode builder.indexMode,
isSyntheticSource
); );
} }
@ -1684,7 +1838,7 @@ public class NumberFieldMapper extends FieldMapper {
} }
public NumberFieldType(String name, NumberType type, boolean isIndexed) { public NumberFieldType(String name, NumberType type, boolean isIndexed) {
this(name, type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null, null); this(name, type, isIndexed, false, true, true, null, Collections.emptyMap(), null, false, null, null, false);
} }
@Override @Override
@ -1761,6 +1915,11 @@ public class NumberFieldMapper extends FieldMapper {
if (hasDocValues()) { if (hasDocValues()) {
return type.blockLoaderFromDocValues(name()); return type.blockLoaderFromDocValues(name());
} }
if (isSyntheticSource) {
return type.blockLoaderFromFallbackSyntheticSource(name(), nullValue, coerce);
}
BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed()
? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) ? BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name())
: BlockSourceReader.lookupMatchingAll(); : BlockSourceReader.lookupMatchingAll();
@ -1876,7 +2035,7 @@ public class NumberFieldMapper extends FieldMapper {
private final MetricType metricType; private final MetricType metricType;
private boolean allowMultipleValues; private boolean allowMultipleValues;
private final IndexVersion indexCreatedVersion; private final IndexVersion indexCreatedVersion;
private final boolean storeMalformedFields; private final boolean isSyntheticSource;
private final IndexMode indexMode; private final IndexMode indexMode;
@ -1884,7 +2043,7 @@ public class NumberFieldMapper extends FieldMapper {
String simpleName, String simpleName,
MappedFieldType mappedFieldType, MappedFieldType mappedFieldType,
BuilderParams builderParams, BuilderParams builderParams,
boolean storeMalformedFields, boolean isSyntheticSource,
Builder builder Builder builder
) { ) {
super(simpleName, mappedFieldType, builderParams); super(simpleName, mappedFieldType, builderParams);
@ -1904,7 +2063,7 @@ public class NumberFieldMapper extends FieldMapper {
this.metricType = builder.metric.getValue(); this.metricType = builder.metric.getValue();
this.allowMultipleValues = builder.allowMultipleValues; this.allowMultipleValues = builder.allowMultipleValues;
this.indexCreatedVersion = builder.indexCreatedVersion; this.indexCreatedVersion = builder.indexCreatedVersion;
this.storeMalformedFields = storeMalformedFields; this.isSyntheticSource = isSyntheticSource;
this.indexMode = builder.indexMode; this.indexMode = builder.indexMode;
} }
@ -1939,7 +2098,7 @@ public class NumberFieldMapper extends FieldMapper {
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
if (ignoreMalformed.value() && context.parser().currentToken().isValue()) { if (ignoreMalformed.value() && context.parser().currentToken().isValue()) {
context.addIgnoredField(mappedFieldType.name()); context.addIgnoredField(mappedFieldType.name());
if (storeMalformedFields) { if (isSyntheticSource) {
// Save a copy of the field so synthetic source can load it // Save a copy of the field so synthetic source can load it
context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser())); context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), context.parser()));
} }

View file

@ -9,7 +9,9 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
@ -135,13 +137,21 @@ public class TimeSeriesIdFieldMapper extends MetadataFieldMapper {
} }
context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId)); context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId));
TsidExtractingIdFieldMapper.createField( BytesRef uidEncoded = TsidExtractingIdFieldMapper.createField(
context, context,
getIndexVersionCreated(context).before(IndexVersions.TIME_SERIES_ROUTING_HASH_IN_ID) getIndexVersionCreated(context).before(IndexVersions.TIME_SERIES_ROUTING_HASH_IN_ID)
? routingPathFields.routingBuilder() ? routingPathFields.routingBuilder()
: null, : null,
timeSeriesId timeSeriesId
); );
// We need to add the uid or id to nested Lucene documents so that when a document gets deleted, the nested documents are
// also deleted. Usually this happens when the nested document is created (in DocumentParserContext#createNestedContext), but
// for time-series indices the _id isn't available at that point.
for (LuceneDocument doc : context.nonRootDocuments()) {
assert doc.getField(IdFieldMapper.NAME) == null;
doc.add(new StringField(IdFieldMapper.NAME, uidEncoded, Field.Store.NO));
}
} }
private IndexVersion getIndexVersionCreated(final DocumentParserContext context) { private IndexVersion getIndexVersionCreated(final DocumentParserContext context) {

View file

@ -46,7 +46,11 @@ public class TsidExtractingIdFieldMapper extends IdFieldMapper {
private static final long SEED = 0; private static final long SEED = 0;
public static void createField(DocumentParserContext context, IndexRouting.ExtractFromSource.Builder routingBuilder, BytesRef tsid) { public static BytesRef createField(
DocumentParserContext context,
IndexRouting.ExtractFromSource.Builder routingBuilder,
BytesRef tsid
) {
final long timestamp = DataStreamTimestampFieldMapper.extractTimestampValue(context.doc()); final long timestamp = DataStreamTimestampFieldMapper.extractTimestampValue(context.doc());
String id; String id;
if (routingBuilder != null) { if (routingBuilder != null) {
@ -94,6 +98,7 @@ public class TsidExtractingIdFieldMapper extends IdFieldMapper {
BytesRef uidEncoded = Uid.encodeId(context.id()); BytesRef uidEncoded = Uid.encodeId(context.id());
context.doc().add(new StringField(NAME, uidEncoded, Field.Store.YES)); context.doc().add(new StringField(NAME, uidEncoded, Field.Store.YES));
return uidEncoded;
} }
public static String createId(int routingHash, BytesRef tsid, long timestamp) { public static String createId(int routingHash, BytesRef tsid, long timestamp) {

View file

@ -2404,6 +2404,12 @@ public class DenseVectorFieldMapper extends FieldMapper {
} }
KnnVectorValues.DocIndexIterator iterator = values.iterator(); KnnVectorValues.DocIndexIterator iterator = values.iterator();
return docId -> { return docId -> {
if (iterator.docID() > docId) {
return hasValue = false;
}
if (iterator.docID() == docId) {
return hasValue = true;
}
hasValue = docId == iterator.advance(docId); hasValue = docId == iterator.advance(docId);
hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId);
ord = iterator.index(); ord = iterator.index();
@ -2414,6 +2420,12 @@ public class DenseVectorFieldMapper extends FieldMapper {
if (byteVectorValues != null) { if (byteVectorValues != null) {
KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator();
return docId -> { return docId -> {
if (iterator.docID() > docId) {
return hasValue = false;
}
if (iterator.docID() == docId) {
return hasValue = true;
}
hasValue = docId == iterator.advance(docId); hasValue = docId == iterator.advance(docId);
ord = iterator.index(); ord = iterator.index();
return hasValue; return hasValue;
@ -2476,6 +2488,12 @@ public class DenseVectorFieldMapper extends FieldMapper {
return null; return null;
} }
return docId -> { return docId -> {
if (values.docID() > docId) {
return hasValue = false;
}
if (values.docID() == docId) {
return hasValue = true;
}
hasValue = docId == values.advance(docId); hasValue = docId == values.advance(docId);
return hasValue; return hasValue;
}; };

View file

@ -4313,17 +4313,15 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl
assert waitForEngineOrClosedShardListeners.isDone(); assert waitForEngineOrClosedShardListeners.isDone();
try { try {
synchronized (engineMutex) { synchronized (engineMutex) {
final var currentEngine = getEngine();
currentEngine.prepareForEngineReset();
var engineConfig = newEngineConfig(replicationTracker);
verifyNotClosed(); verifyNotClosed();
IOUtils.close(currentEngine); getEngine().prepareForEngineReset();
var newEngine = createEngine(engineConfig); var newEngine = createEngine(newEngineConfig(replicationTracker));
currentEngineReference.set(newEngine); IOUtils.close(currentEngineReference.getAndSet(newEngine));
onNewEngine(newEngine); onNewEngine(newEngine);
} }
onSettingsChanged(); onSettingsChanged();
} catch (Exception e) { } catch (Exception e) {
// we want to fail the shard in the case prepareForEngineReset throws
failShard("unable to reset engine", e); failShard("unable to reset engine", e);
} }
} }

View file

@ -57,14 +57,17 @@ public record IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Ma
* Read from a stream. * Read from a stream.
*/ */
public static IngestStats read(StreamInput in) throws IOException { public static IngestStats read(StreamInput in) throws IOException {
var stats = new Stats(in); var stats = readStats(in);
var size = in.readVInt(); var size = in.readVInt();
if (stats == Stats.IDENTITY && size == 0) {
return IDENTITY;
}
var pipelineStats = new ArrayList<PipelineStat>(size); var pipelineStats = new ArrayList<PipelineStat>(size);
var processorStats = Maps.<String, List<ProcessorStat>>newMapWithExpectedSize(size); var processorStats = Maps.<String, List<ProcessorStat>>newMapWithExpectedSize(size);
for (var i = 0; i < size; i++) { for (var i = 0; i < size; i++) {
var pipelineId = in.readString(); var pipelineId = in.readString();
var pipelineStat = new Stats(in); var pipelineStat = readStats(in);
var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? new ByteStats(in) : new ByteStats(0, 0); var byteStat = in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0) ? new ByteStats(in) : new ByteStats(0, 0);
pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat)); pipelineStats.add(new PipelineStat(pipelineId, pipelineStat, byteStat));
int processorsSize = in.readVInt(); int processorsSize = in.readVInt();
@ -72,7 +75,7 @@ public record IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Ma
for (var j = 0; j < processorsSize; j++) { for (var j = 0; j < processorsSize; j++) {
var processorName = in.readString(); var processorName = in.readString();
var processorType = in.readString(); var processorType = in.readString();
var processorStat = new Stats(in); var processorStat = readStats(in);
processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat)); processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat));
} }
processorStats.put(pipelineId, Collections.unmodifiableList(processorStatsPerPipeline)); processorStats.put(pipelineId, Collections.unmodifiableList(processorStatsPerPipeline));
@ -167,6 +170,21 @@ public record IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Ma
return totalsPerPipelineProcessor; return totalsPerPipelineProcessor;
} }
/**
* Read {@link Stats} from a stream.
*/
private static Stats readStats(StreamInput in) throws IOException {
long ingestCount = in.readVLong();
long ingestTimeInMillis = in.readVLong();
long ingestCurrent = in.readVLong();
long ingestFailedCount = in.readVLong();
if (ingestCount == 0 && ingestTimeInMillis == 0 && ingestCurrent == 0 && ingestFailedCount == 0) {
return Stats.IDENTITY;
} else {
return new Stats(ingestCount, ingestTimeInMillis, ingestCurrent, ingestFailedCount);
}
}
public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) public record Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount)
implements implements
Writeable, Writeable,
@ -174,13 +192,6 @@ public record IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Ma
public static final Stats IDENTITY = new Stats(0, 0, 0, 0); public static final Stats IDENTITY = new Stats(0, 0, 0, 0);
/**
* Read from a stream.
*/
public Stats(StreamInput in) throws IOException {
this(in.readVLong(), in.readVLong(), in.readVLong(), in.readVLong());
}
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(ingestCount); out.writeVLong(ingestCount);

Some files were not shown because too many files have changed in this diff Show more