From 19fe0a480cfe2af84e5a44b28e31ea53ec08186d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 18 Feb 2025 16:40:52 +1100 Subject: [PATCH 01/29] Mute org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT #122810 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 842befc8e032..ddcd4a988dbd 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -397,6 +397,8 @@ tests: - class: org.elasticsearch.repositories.blobstore.testkit.analyze.S3RepositoryAnalysisRestIT method: testRepositoryAnalysis issue: https://github.com/elastic/elasticsearch/issues/122799 +- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT + issue: https://github.com/elastic/elasticsearch/issues/122810 # Examples: # From 850249b8970b1f1e2ff622867c475d9a007aaa9d Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 18 Feb 2025 20:11:36 +1100 Subject: [PATCH 02/29] Handle status code 200 for s3 CMU response (#122815) When a CompleteMultipartUpload request fails after the initial 200 response, the status code of the failure response used to not be set and hence got translated to status code 0. With #116212, we handle this case accordingly. Since AWS SDK 1.12.691, the status code is now set to 200 instead of 0. This PR changes our error handling code accordingly. 
Relates: #122431 Relates: #116212 Resolves: #122799 Relevant AWS SDK change https://github.com/aws/aws-sdk-java/blame/430899c217f34fae63b35c77f4d9bfd361c9de77/aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/AmazonS3Client.java#L3696-L3709 --- .../org/elasticsearch/repositories/s3/S3BlobContainer.java | 5 ++--- muted-tests.yml | 3 --- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index ea1365796401..d7f5a6e6dfe3 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -899,11 +899,10 @@ class S3BlobContainer extends AbstractBlobContainer { logger.trace(() -> Strings.format("[%s]: compareAndExchangeRegister failed", key), e); if (e instanceof AmazonS3Exception amazonS3Exception && (amazonS3Exception.getStatusCode() == 404 - || amazonS3Exception.getStatusCode() == 0 && "NoSuchUpload".equals(amazonS3Exception.getErrorCode()))) { + || amazonS3Exception.getStatusCode() == 200 && "NoSuchUpload".equals(amazonS3Exception.getErrorCode()))) { // An uncaught 404 means that our multipart upload was aborted by a concurrent operation before we could complete it. // Also (rarely) S3 can start processing the request during a concurrent abort and this can result in a 200 OK with an - // NoSuchUpload... in the response, which the SDK translates to status code 0. Either way, this means - // that our write encountered contention: + // NoSuchUpload... in the response. 
Either way, this means that our write encountered contention: delegate.onResponse(OptionalBytesReference.MISSING); } else { delegate.onFailure(e); diff --git a/muted-tests.yml b/muted-tests.yml index ddcd4a988dbd..72b01ef7345d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -394,9 +394,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/122680 - class: org.elasticsearch.entitlement.qa.EntitlementsDeniedIT issue: https://github.com/elastic/elasticsearch/issues/122566 -- class: org.elasticsearch.repositories.blobstore.testkit.analyze.S3RepositoryAnalysisRestIT - method: testRepositoryAnalysis - issue: https://github.com/elastic/elasticsearch/issues/122799 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/122810 From a1b16d907651837e7a0eb16ecce52893cd5f5eec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Tue, 18 Feb 2025 11:47:18 +0100 Subject: [PATCH 03/29] ESQL: Add ser/deser to functions tests (#122498) # Why and what? First part of https://github.com/elastic/elasticsearch/issues/122588 Some functions don't serialize their Source. This sometimes makes them emit wrong warnings, with -1:-1 line and without the correct source text. This PR intends to integrate ser/deserialization before executing some (randomly chosen) functions, as well as using a non-empty Source to build them. 
_Note:_ It's different from the SerializationTests: Here we check that, whether serialized or not, the functionality is identical --- .../xpack/esql/core/tree/Source.java | 4 ++ .../function/AbstractFunctionTestCase.java | 45 +++++++++++++++++-- .../AbstractScalarFunctionTestCase.java | 4 +- .../expression/function/TestCaseSupplier.java | 17 ++++++- .../function/aggregate/TopTests.java | 14 ++++-- .../function/fulltext/MatchTests.java | 17 +++++++ ...AbstractConfigurationFunctionTestCase.java | 32 ++++++------- .../FromAggregateMetricDoubleTests.java | 6 +++ .../scalar/convert/ToCartesianPointTests.java | 4 +- .../scalar/convert/ToCartesianShapeTests.java | 4 +- .../scalar/convert/ToDateNanosTests.java | 20 ++++----- .../scalar/convert/ToDatePeriodTests.java | 6 +++ .../scalar/convert/ToDatetimeTests.java | 24 +++++----- .../scalar/convert/ToDegreesTests.java | 8 ++-- .../scalar/convert/ToDoubleTests.java | 4 +- .../scalar/convert/ToGeoPointTests.java | 4 +- .../scalar/convert/ToGeoShapeTests.java | 4 +- .../function/scalar/convert/ToIPTests.java | 4 +- .../scalar/convert/ToIntegerTests.java | 36 +++++++-------- .../function/scalar/convert/ToLongTests.java | 24 +++++----- .../scalar/convert/ToTimeDurationTests.java | 6 +++ .../scalar/convert/ToUnsignedLongTests.java | 28 ++++++------ .../function/scalar/date/DateDiffTests.java | 12 +++-- .../scalar/date/DateExtractTests.java | 6 +-- .../function/scalar/date/DateParseTests.java | 10 ++--- .../function/scalar/date/NowTests.java | 5 +-- .../function/scalar/math/AcosTests.java | 8 ++-- .../function/scalar/math/AsinTests.java | 8 ++-- .../function/scalar/math/CoshTests.java | 8 ++-- .../function/scalar/math/Log10Tests.java | 16 +++---- .../function/scalar/math/LogTests.java | 16 +++---- .../function/scalar/math/PowTests.java | 4 +- .../function/scalar/math/RoundTests.java | 4 +- .../function/scalar/math/SinhTests.java | 8 ++-- .../function/scalar/math/SqrtTests.java | 12 ++--- 
.../multivalue/MvPSeriesWeightedSumTests.java | 5 ++- .../scalar/multivalue/MvPercentileTests.java | 6 ++- .../scalar/multivalue/MvSumTests.java | 10 ++++- .../BinarySpatialFunctionTestCase.java | 11 ++++- .../function/scalar/string/HashTests.java | 9 ++-- .../function/scalar/string/RepeatTests.java | 4 +- .../function/scalar/string/ReplaceTests.java | 10 ++++- .../function/scalar/string/SpaceTests.java | 8 ++-- .../operator/arithmetic/AddTests.java | 12 ++--- .../operator/arithmetic/DivTests.java | 12 ++--- .../operator/arithmetic/ModTests.java | 8 ++-- .../operator/arithmetic/MulTests.java | 8 ++-- .../operator/arithmetic/NegTests.java | 8 ++-- .../operator/arithmetic/SubTests.java | 8 ++-- 49 files changed, 337 insertions(+), 214 deletions(-) diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Source.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Source.java index 1b00ff453760..385f3c8304e0 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Source.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/tree/Source.java @@ -105,6 +105,10 @@ public final class Source implements Writeable { return text + location; } + /** + * @deprecated Sources created by this can't be correctly deserialized. For use in tests only. 
+ */ + @Deprecated public static Source synthetic(String text) { return new Source(Location.EMPTY, text); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 65e0a2f1b20a..9ed140558a47 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import org.elasticsearch.xpack.esql.optimizer.rules.logical.FoldNull; import org.elasticsearch.xpack.esql.parser.ExpressionBuilder; import org.elasticsearch.xpack.esql.planner.Layout; @@ -102,6 +103,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; +import static org.elasticsearch.xpack.esql.SerializationTestUtils.serializeDeserialize; import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.mapParam; import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.param; import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.paramWithoutAnnotation; @@ -331,7 +333,7 @@ public abstract 
class AbstractFunctionTestCase extends ESTestCase { String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; String expectedTypeString = expectedTypeSupplier.apply(validPerPosition.get(badArgPosition), badArgPosition); String name = types.get(badArgPosition).typeName(); - return ordinal + "argument of [] must be [" + expectedTypeString + "], found value [" + name + "] type [" + name + "]"; + return ordinal + "argument of [source] must be [" + expectedTypeString + "], found value [" + name + "] type [" + name + "]"; } @FunctionalInterface @@ -522,7 +524,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { * except those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. */ protected final Expression buildFieldExpression(TestCaseSupplier.TestCase testCase) { - return build(testCase.getSource(), testCase.getDataAsFields()); + return randomSerializeDeserialize(build(testCase.getSource(), testCase.getDataAsFields())); } /** @@ -531,12 +533,47 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { * those that have been marked with {@link TestCaseSupplier.TypedData#forceLiteral()}. */ protected final Expression buildDeepCopyOfFieldExpression(TestCaseSupplier.TestCase testCase) { + // We don't use `randomSerializeDeserialize()` here as the deep copied fields aren't deserializable right now return build(testCase.getSource(), testCase.getDataAsDeepCopiedFields()); } + private Expression randomSerializeDeserialize(Expression expression) { + if (randomBoolean()) { + return expression; + } + + return serializeDeserializeExpression(expression); + } + + /** + * Returns the expression after being serialized and deserialized. + *

+ * Tests randomly go through this method to ensure that the function retains the same logic after serialization and deserialization. + *

+ *

+ * Can be overridden to provide custom serialization and deserialization logic, or disable it if needed. + *

+ */ + protected Expression serializeDeserializeExpression(Expression expression) { + Expression newExpression = serializeDeserialize( + expression, + PlanStreamOutput::writeNamedWriteable, + in -> in.readNamedWriteable(Expression.class), + testCase.getConfiguration() // The configuration query should be == to the source text of the function for this to work + ); + + // Fields use synthetic sources, which can't be serialized. So we replace with the originals instead. + var dummyChildren = newExpression.children() + .stream() + .map(c -> new Literal(Source.EMPTY, "anything that won't match any test case", c.dataType())) + .toList(); + // We first replace them with other unrelated expressions to force a replace, as some replaceChildren() will check for equality + return newExpression.replaceChildrenSameSize(dummyChildren).replaceChildrenSameSize(expression.children()); + } + protected final Expression buildLiteralExpression(TestCaseSupplier.TestCase testCase) { assumeTrue("Data can't be converted to literals", testCase.canGetDataAsLiterals()); - return build(testCase.getSource(), testCase.getDataAsLiterals()); + return randomSerializeDeserialize(build(testCase.getSource(), testCase.getDataAsLiterals())); } public static EvaluatorMapper.ToEvaluator toEvaluator() { @@ -711,7 +748,7 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { } public void testSerializationOfSimple() { - assertSerialization(buildFieldExpression(testCase)); + assertSerialization(buildFieldExpression(testCase), testCase.getConfiguration()); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 05202159a1bc..eb7d61dff8d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -401,8 +401,8 @@ public abstract class AbstractScalarFunctionTestCase extends AbstractFunctionTes evaluator + "[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", dataType, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: " + typeNameOverflow) ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 4f89ba6bd050..f0e338c0cf28 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -16,12 +16,15 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.MapExpression; +import org.elasticsearch.xpack.esql.core.tree.Location; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; +import org.elasticsearch.xpack.esql.session.Configuration; import org.elasticsearch.xpack.versionfield.Version; import 
org.hamcrest.Matcher; @@ -54,6 +57,9 @@ public record TestCaseSupplier(String name, List types, Supplier { + public static final Source TEST_SOURCE = new Source(new Location(1, 0), "source"); + public static final Configuration TEST_CONFIGURATION = EsqlTestUtils.configuration(TEST_SOURCE.text()); + private static final Logger logger = LogManager.getLogger(TestCaseSupplier.class); /** @@ -1388,6 +1394,10 @@ public record TestCaseSupplier(String name, List types, Supplier types, Supplier types, Supplier getData() { return data; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java index 1d18d66110fe..fd2d42c6e988 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/TopTests.java @@ -241,7 +241,7 @@ public class TopTests extends AbstractAggregationTestCase { new TestCaseSupplier.TypedData(0, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "Limit must be greater than 0 in [], found [0]" + "Limit must be greater than 0 in [source], found [0]" ) ), new TestCaseSupplier( @@ -252,7 +252,7 @@ public class TopTests extends AbstractAggregationTestCase { new TestCaseSupplier.TypedData(2, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("wrong-order"), DataType.KEYWORD, "order").forceLiteral() ), - "Invalid order value in [], expected [ASC, DESC] but got [wrong-order]" + "Invalid order value in [source], expected [ASC, DESC] but got [wrong-order]" ) ), new TestCaseSupplier( @@ -263,7 +263,7 @@ public class TopTests extends AbstractAggregationTestCase { new TestCaseSupplier.TypedData(null, DataType.INTEGER, 
"limit").forceLiteral(), new TestCaseSupplier.TypedData(new BytesRef("desc"), DataType.KEYWORD, "order").forceLiteral() ), - "second argument of [] cannot be null, received [limit]" + "second argument of [source] cannot be null, received [limit]" ) ), new TestCaseSupplier( @@ -274,7 +274,7 @@ public class TopTests extends AbstractAggregationTestCase { new TestCaseSupplier.TypedData(1, DataType.INTEGER, "limit").forceLiteral(), new TestCaseSupplier.TypedData(null, DataType.KEYWORD, "order").forceLiteral() ), - "third argument of [] cannot be null, received [order]" + "third argument of [source] cannot be null, received [order]" ) ) ) @@ -317,4 +317,10 @@ public class TopTests extends AbstractAggregationTestCase { ); }); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // TODO: This aggregation doesn't serialize the Source, and must be fixed. + return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index 550471507526..f4a4877e3317 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -19,11 +19,13 @@ import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import static org.elasticsearch.xpack.esql.SerializationTestUtils.serializeDeserialize; import static 
org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; @@ -83,4 +85,19 @@ public class MatchTests extends AbstractMatchFullTextFunctionTests { } return match; } + + /** + * Copy of the overridden method that doesn't check for children size, as the {@code options} child isn't serialized in Match. + */ + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + Expression newExpression = serializeDeserialize( + expression, + PlanStreamOutput::writeNamedWriteable, + in -> in.readNamedWriteable(Expression.class), + testCase.getConfiguration() // The configuration query should be == to the source text of the function for this to work + ); + // Fields use synthetic sources, which can't be serialized. So we use the originals instead. + return newExpression.replaceChildren(expression.children()); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java index 8cc9091dae90..d695be2ce0ea 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AbstractConfigurationFunctionTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -27,10 +26,22 @@ public abstract class 
AbstractConfigurationFunctionTestCase extends AbstractScal @Override protected Expression build(Source source, List args) { - return buildWithConfiguration(source, args, EsqlTestUtils.TEST_CFG); + return buildWithConfiguration(source, args, testCase.getConfiguration()); } - static Configuration randomConfiguration() { + public void testSerializationWithConfiguration() { + Configuration config = randomConfiguration(); + Expression expr = buildWithConfiguration(testCase.getSource(), testCase.getDataAsFields(), config); + + assertSerialization(expr, config); + + Configuration differentConfig = randomValueOtherThan(config, AbstractConfigurationFunctionTestCase::randomConfiguration); + + Expression differentExpr = buildWithConfiguration(testCase.getSource(), testCase.getDataAsFields(), differentConfig); + assertNotEquals(expr, differentExpr); + } + + private static Configuration randomConfiguration() { // TODO: Randomize the query and maybe the pragmas. return new Configuration( randomZone(), @@ -47,19 +58,4 @@ public abstract class AbstractConfigurationFunctionTestCase extends AbstractScal randomBoolean() ); } - - public void testSerializationWithConfiguration() { - Configuration config = randomConfiguration(); - Expression expr = buildWithConfiguration(testCase.getSource(), testCase.getDataAsFields(), config); - - assertSerialization(expr, config); - - Configuration differentConfig; - do { - differentConfig = randomConfiguration(); - } while (config.equals(differentConfig)); - - Expression differentExpr = buildWithConfiguration(testCase.getSource(), testCase.getDataAsFields(), differentConfig); - assertFalse(expr.equals(differentExpr)); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java index 94d9bd5f64cb..caa723f125c2 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromAggregateMetricDoubleTests.java @@ -75,4 +75,10 @@ public class FromAggregateMetricDoubleTests extends AbstractScalarFunctionTestCa ) ); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // AggregateMetricDoubleLiteral can't be serialized when it's a literal + return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java index a4c31445f86b..7d08029d919e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -49,8 +49,8 @@ public class ToCartesianPointTests extends AbstractScalarFunctionTestCase { bytesRef -> { var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: " + exception + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: " + exception ); } ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java index 9f8eb52de950..3f0a7999b8dd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java @@ -50,8 +50,8 @@ public class ToCartesianShapeTests extends AbstractScalarFunctionTestCase { bytesRef -> { var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: " + exception + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: " + exception ); } ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java index dc523bc24721..da32bb84aff1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDateNanosTests.java @@ -70,8 +70,8 @@ public class ToDateNanosTests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, -1L, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." ) ); TestCaseSupplier.forUnaryUnsignedLong( @@ -91,8 +91,8 @@ public class ToDateNanosTests extends AbstractScalarFunctionTestCase { BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), UNSIGNED_LONG_MAX, bi -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + bi + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + bi + "] out of [long] range" ) ); TestCaseSupplier.forUnaryDouble( @@ -103,8 +103,8 @@ public class ToDateNanosTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -Double.MIN_VALUE, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: Nanosecond dates before 1970-01-01T00:00:00.000Z are not supported." ) ); TestCaseSupplier.forUnaryDouble( @@ -115,8 +115,8 @@ public class ToDateNanosTests extends AbstractScalarFunctionTestCase { 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) Double.POSITIVE_INFINITY, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" ) ); TestCaseSupplier.forUnaryStrings( @@ -125,8 +125,8 @@ public class ToDateNanosTests extends AbstractScalarFunctionTestCase { DataType.DATE_NANOS, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: " + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: " + (bytesRef.utf8ToString().isEmpty() ? "cannot parse empty datetime" : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [strict_date_optional_time_nanos]")) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java index 8060326365d5..779c3a4ba635 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatePeriodTests.java @@ -83,4 +83,10 @@ public class ToDatePeriodTests extends AbstractScalarFunctionTestCase { public void testSerializationOfSimple() { assertTrue("Serialization test does not apply", true); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // Can't be serialized + return expression; + } } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java index 98c6e0ea8adc..b1727e692863 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeTests.java @@ -81,8 +81,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.TWO), UNSIGNED_LONG_MAX, bi -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + bi + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + bi + "] out of [long] range" ) ); TestCaseSupplier.forUnaryDouble( @@ -93,8 +93,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -9.223372036854777E18, // a "convenient" value smaller than `(double) Long.MIN_VALUE` (== ...776E18) d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" ) ); TestCaseSupplier.forUnaryDouble( @@ -105,8 +105,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { 9.223372036854777E18, // a "convenient" value larger than `(double) Long.MAX_VALUE` (== ...776E18) Double.POSITIVE_INFINITY, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" ) ); TestCaseSupplier.forUnaryStrings( @@ -115,8 +115,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: " + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: " + (bytesRef.utf8ToString().isEmpty() ? "cannot parse empty datetime" : ("failed to parse date field [" + bytesRef.utf8ToString() + "] with format [strict_date_optional_time]")) @@ -151,8 +151,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: failed to parse date field [" + ((BytesRef) bytesRef).utf8ToString() + "] with format [strict_date_optional_time]" ) @@ -171,8 +171,8 @@ public class ToDatetimeTests extends AbstractScalarFunctionTestCase { DataType.DATETIME, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: failed to parse date field [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: failed to parse date field [" + ((BytesRef) bytesRef).utf8ToString() + "] with format [strict_date_optional_time]" ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java index cc780624b80a..5de5721d3fd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesTests.java @@ -67,8 +67,8 @@ public class ToDegreesTests extends AbstractScalarFunctionTestCase { double deg = Math.toDegrees(d); ArrayList warnings = new ArrayList<>(2); if (Double.isNaN(deg) || Double.isInfinite(deg)) { - warnings.add("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded."); - warnings.add("Line -1:-1: java.lang.ArithmeticException: not a finite double number: " + deg); + warnings.add("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded."); + warnings.add("Line 1:1: java.lang.ArithmeticException: not a finite double number: " + deg); } return warnings; }); @@ -84,8 +84,8 @@ public class ToDegreesTests extends AbstractScalarFunctionTestCase { DataType.DOUBLE, d -> null, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: not a finite double number: " + ((double) d > 0 ? "Infinity" : "-Infinity") + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: not a finite double number: " + ((double) d > 0 ? "Infinity" : "-Infinity") ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index c7e6b4f0f380..c7253e1b8059 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -65,8 +65,8 @@ public class ToDoubleTests extends AbstractScalarFunctionTestCase { () -> EsqlDataTypeConverter.stringToDouble(bytesRef.utf8ToString()) ); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: " + exception + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: " + exception ); }); TestCaseSupplier.forUnaryUnsignedLong( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java index 5fdd83d853c7..f15d260aecf4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -44,8 +44,8 @@ public class ToGeoPointTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), DataType.GEO_POINT, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: " + exception + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: " + exception ); }); // strings that are geo point representations diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java index 9ac823553491..0f2c41feb3b0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java @@ -45,8 +45,8 @@ public class ToGeoShapeTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("FromString"), DataType.GEO_SHAPE, bytesRef -> null, bytesRef -> { var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: " + exception + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: " + exception ); }); // strings that are geo_shape representations diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java index 22517ae99596..0bd3164a53a7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPTests.java @@ -47,8 +47,8 @@ public class ToIPTests extends AbstractScalarFunctionTestCase { DataType.IP, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.IllegalArgumentException: '" + bytesRef.utf8ToString() + "' is not an IP string literal." + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.IllegalArgumentException: '" + bytesRef.utf8ToString() + "' is not an IP string literal." ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java index 0bd2aa24e80d..4b3dee96652d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerTests.java @@ -60,8 +60,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { DataType.INTEGER, l -> null, l -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ((Instant) l).toEpochMilli() + "] out of [integer] range" ) @@ -73,8 +73,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) @@ -98,8 +98,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, Integer.MIN_VALUE - 1d, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [integer] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [integer] range" ) ); // from doubles outside Integer's range, positive @@ -111,8 +111,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { Integer.MAX_VALUE + 1d, Double.POSITIVE_INFINITY, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [integer] range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [integer] range" ) ); @@ -135,8 +135,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { BigInteger.valueOf(Integer.MAX_VALUE).add(BigInteger.ONE), UNSIGNED_LONG_MAX, ul -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ul + "] out of [integer] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ul + "] out of [integer] range" ) ); @@ -160,8 +160,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, Integer.MIN_VALUE - 1L, l -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [integer] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [integer] range" ) ); @@ -174,8 +174,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { Integer.MAX_VALUE + 1L, Long.MAX_VALUE, l -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [integer] range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [integer] range" ) ); @@ -232,8 +232,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + ((BytesRef) bytesRef).utf8ToString() + "]" ) @@ -255,8 +255,8 @@ public class ToIntegerTests extends AbstractScalarFunctionTestCase { DataType.INTEGER, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + ((BytesRef) bytesRef).utf8ToString() + "]" ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java index c2d7565c3e6f..6b26f74a3871 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongTests.java @@ -59,8 +59,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { DataType.LONG, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + bytesRef.utf8ToString() + "]" ) @@ -84,8 +84,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, Long.MIN_VALUE - 1d, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" ) ); // from doubles outside long's range, positive @@ -97,8 +97,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { Long.MAX_VALUE + 1d, Double.POSITIVE_INFINITY, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [long] range" ) ); @@ -120,8 +120,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE), UNSIGNED_LONG_MAX, ul -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ul + "] out of [long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ul + "] out of [long] range" ) ); @@ -190,8 +190,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { DataType.LONG, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + ((BytesRef) bytesRef).utf8ToString() + "]" ) @@ -213,8 +213,8 @@ public class ToLongTests extends AbstractScalarFunctionTestCase { DataType.LONG, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: Cannot parse number [" + ((BytesRef) bytesRef).utf8ToString() + "]" ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java index 980eed1af910..0b0126a8db89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToTimeDurationTests.java @@ -82,4 +82,10 @@ public class ToTimeDurationTests extends AbstractScalarFunctionTestCase { public void testSerializationOfSimple() { assertTrue("Serialization test does not apply", true); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // Can't be serialized + return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java index adf104e8db5b..4a94c00a2f95 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongTests.java @@ -74,8 +74,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { // this shortcut here. Exception e = expectThrows(NumberFormatException.class, () -> new BigDecimal(bytesRef.utf8ToString())); return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: " + e.getMessage() + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.NumberFormatException: " + e.getMessage() ); }); // from doubles within unsigned_long's range @@ -97,8 +97,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -1d, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" ) ); // from doubles outside Long's range, positive @@ -110,8 +110,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { UNSIGNED_LONG_MAX_AS_DOUBLE + 10e5, Double.POSITIVE_INFINITY, d -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + d + "] out of [unsigned_long] range" ) ); @@ -134,8 +134,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, -1L, l -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" ) ); @@ -158,8 +158,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { Integer.MIN_VALUE, -1, l -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + l + "] out of [unsigned_long] range" ) ); @@ -216,8 +216,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { DataType.UNSIGNED_LONG, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ((BytesRef) bytesRef).utf8ToString() + "] out of [unsigned_long] range" ) @@ -239,8 +239,8 @@ public class ToUnsignedLongTests extends AbstractScalarFunctionTestCase { DataType.UNSIGNED_LONG, bytesRef -> null, bytesRef -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [" + ((BytesRef) bytesRef).utf8ToString() + "] out of [unsigned_long] range" ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index e23283d89957..9e4199446540 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -91,7 +91,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { zdtStart2, zdtEnd2, "nanoseconds", - "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [300000000000] out of [integer] range" + "Line 1:1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [300000000000] out of [integer] range" ) ); @@ -241,7 +241,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") .withWarning(warning) ), // Units as text case @@ -258,7 +258,7 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") .withWarning(warning) ) ); @@ -268,4 +268,10 @@ public class DateDiffTests extends AbstractScalarFunctionTestCase { protected Expression build(Source source, List args) { return new DateDiff(source, args.get(0), args.get(1), args.get(2)); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // TODO: This function doesn't serialize the Source, and must be fixed. + return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java index 01a84d7885ed..0067b022755e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractTests.java @@ -93,12 +93,12 @@ public class DateExtractTests extends AbstractConfigurationFunctionTestCase { "DateExtractMillisEvaluator[value=Attribute[channel=1], chronoField=Attribute[channel=0], zone=Z]", DataType.LONG, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: " + "Line 1:1: java.lang.IllegalArgumentException: " + "No enum constant java.time.temporal.ChronoField.NOT A UNIT" ) - .withFoldingException(InvalidArgumentException.class, "invalid date field for []: not a unit") + .withFoldingException(InvalidArgumentException.class, "invalid date field for [source]: not a unit") ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index b51866c0cc8a..8b53a1b9112b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -117,13 +117,13 @@ public class DateParseTests extends AbstractScalarFunctionTestCase { "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0]]", DataType.DATETIME, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: Invalid format: " + "[not a format]: Unknown pattern letter: o" + "Line 1:1: java.lang.IllegalArgumentException: Invalid format: [not a format]: Unknown pattern letter: o" ) .withFoldingException( InvalidArgumentException.class, - "invalid date pattern for []: Invalid format: [not a format]: Unknown pattern letter: o" + "invalid date pattern for [source]: Invalid format: [not a format]: Unknown pattern letter: o" ) ), new TestCaseSupplier( @@ -137,9 +137,9 @@ public class DateParseTests extends AbstractScalarFunctionTestCase { "DateParseEvaluator[val=Attribute[channel=1], formatter=Attribute[channel=0]]", DataType.DATETIME, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: " + "Line 1:1: java.lang.IllegalArgumentException: " + "failed to parse date field [not a date] with format [yyyy-MM-dd]" ) ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java index ed2c45f8c232..b3c3ab874f90 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowTests.java @@ -10,7 +10,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.date; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import 
org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -42,7 +41,7 @@ public class NowTests extends AbstractConfigurationFunctionTestCase { List.of(), matchesPattern("LiteralsEvaluator\\[lit=.*]"), DataType.DATETIME, - equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) + equalTo(TestCaseSupplier.TEST_CONFIGURATION.now().toInstant().toEpochMilli()) ) ) ) @@ -56,7 +55,7 @@ public class NowTests extends AbstractConfigurationFunctionTestCase { @Override protected Matcher allNullsMatcher() { - return equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()); + return equalTo(testCase.getConfiguration().now().toInstant().toEpochMilli()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java index 3032c0171ddb..42f3612c2e96 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosTests.java @@ -38,8 +38,8 @@ public class AcosTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, Math.nextDown(-1d), List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Acos input out of range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Acos input out of range" ) ) ); @@ -51,8 +51,8 @@ public class AcosTests extends AbstractScalarFunctionTestCase { Math.nextUp(1d), Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Acos input out of range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Acos input out of range" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java index 805224396df4..4a5c9c0ba104 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinTests.java @@ -38,8 +38,8 @@ public class AsinTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, Math.nextDown(-1d), List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Asin input out of range" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Asin input out of range" ) ) ); @@ -51,8 +51,8 @@ public class AsinTests extends AbstractScalarFunctionTestCase { Math.nextUp(1d), Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Asin input out of range" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Asin input out of range" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java index 1e4cdeada4f1..f36ba832e240 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshTests.java @@ -43,8 +43,8 @@ public class CoshTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -711d, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: cosh overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: cosh overflow" ) ) ); @@ -56,8 +56,8 @@ public class CoshTests extends AbstractScalarFunctionTestCase { 711d, Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: cosh overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: cosh overflow" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java index 63535f68e38a..24810c699c51 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10Tests.java @@ -83,8 +83,8 @@ public class Log10Tests extends AbstractScalarFunctionTestCase { Integer.MIN_VALUE, 0, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ); TestCaseSupplier.forUnaryLong( @@ -95,8 +95,8 @@ public class Log10Tests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, 0L, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ); TestCaseSupplier.forUnaryUnsignedLong( @@ -107,8 +107,8 @@ public class Log10Tests extends AbstractScalarFunctionTestCase { BigInteger.ZERO, BigInteger.ZERO, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ); TestCaseSupplier.forUnaryDouble( @@ -119,8 +119,8 @@ public class Log10Tests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, 0d, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java index 7d3778346f4a..dc66f13c32bb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -92,8 +92,8 @@ public class LogTests extends AbstractScalarFunctionTestCase { 1d, Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ) ); @@ -110,8 +110,8 @@ public class LogTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, 0d, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ) ); @@ -128,8 +128,8 @@ public class LogTests extends AbstractScalarFunctionTestCase { 1d, Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of base 1" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of base 1" ) ) ); @@ -181,8 +181,8 @@ public class LogTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, 0d, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Log of non-positive number" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java index 10e3afc62f88..c036df27da0a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java @@ -72,8 +72,8 @@ public class PowTests extends AbstractScalarFunctionTestCase { 144d, Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: not a finite double number: Infinity" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: not a finite double number: Infinity" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index e7a8d2d7ef9d..38af38a66bff 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -200,8 +200,8 @@ public class RoundTests extends AbstractScalarFunctionTestCase { "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", DataType.UNSIGNED_LONG, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: unsigned_long overflow") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: unsigned_long overflow") ) ); suppliers.add( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java index 37a72c417757..6f57b2d459b3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhTests.java @@ -43,8 +43,8 @@ public class SinhTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -711d, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: sinh overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: sinh overflow" ) ) ); @@ -56,8 +56,8 @@ public class SinhTests extends AbstractScalarFunctionTestCase { 711d, Double.POSITIVE_INFINITY, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: sinh overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: sinh overflow" ) ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java index f7c5e8b84983..a7cd99f6e9ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtTests.java @@ -81,8 +81,8 @@ public class SqrtTests extends AbstractScalarFunctionTestCase { Integer.MIN_VALUE, -1, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Square root of negative" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Square root of negative" ) ); TestCaseSupplier.forUnaryLong( @@ -93,8 +93,8 @@ public class SqrtTests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, -1, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Square root of negative" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Square root of negative" ) ); TestCaseSupplier.forUnaryDouble( @@ -105,8 +105,8 @@ public class SqrtTests extends AbstractScalarFunctionTestCase { Double.NEGATIVE_INFINITY, -Double.MIN_VALUE, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: Square root of negative" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: Square root of negative" ) ); return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java index 47669cba7189..3443d4eb902b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPSeriesWeightedSumTests.java @@ -97,8 +97,9 @@ public class MvPSeriesWeightedSumTests extends AbstractScalarFunctionTestCase { Double.isFinite(expectedResult) ? closeTo(expectedResult, Math.abs(expectedResult * .00000001)) : nullValue() ); if (Double.isFinite(expectedResult) == false) { - return testCase.withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: double overflow"); + return testCase.withWarning( + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded." + ).withWarning("Line 1:1: java.lang.ArithmeticException: double overflow"); } return testCase; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java index 9c506ee0b595..40293e33928c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileTests.java @@ -324,9 +324,11 @@ public class MvPercentileTests extends AbstractScalarFunctionTestCase { evaluatorString(fieldType, percentileType), fieldType, nullValue() - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning( + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded." 
+ ) .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: Percentile parameter must be " + "Line 1:1: java.lang.IllegalArgumentException: Percentile parameter must be " + "a number between 0 and 100, found [" + percentile.doubleValue() + "]" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java index 19bb915b405d..7b6f26fbb2b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumTests.java @@ -78,8 +78,8 @@ public class MvSumTests extends AbstractMultivalueFunctionTestCase { "MvSum[field=Attribute[channel=0]]", dataType, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: " + typeNameOverflow) + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: " + typeNameOverflow) ); } @@ -87,4 +87,10 @@ public class MvSumTests extends AbstractMultivalueFunctionTestCase { protected Expression build(Source source, Expression field) { return new MvSum(source, field); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // TODO: This function doesn't serialize the Source, and must be fixed. 
+ return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java index c93d871ca2b8..68dd60048939 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunctionTestCase.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.spatial; import joptsimple.internal.Strings; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; @@ -143,7 +144,7 @@ public abstract class BinarySpatialFunctionTestCase extends AbstractScalarFuncti Locale.ROOT, "%s argument of [%s] must be [%s], found value [%s] type [%s]", TypeResolutions.ParamOrdinal.fromIndex(2).toString().toLowerCase(Locale.ROOT), - "", + "source", "double", invalidType.typeName(), invalidType.typeName() @@ -161,7 +162,7 @@ public abstract class BinarySpatialFunctionTestCase extends AbstractScalarFuncti String ordinal = includeOrdinal ? TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; String expectedType = goodArgPosition >= 0 ? 
compatibleTypes(types.get(goodArgPosition)) : expected; String name = types.get(badArgPosition).typeName(); - return ordinal + "argument of [] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; + return ordinal + "argument of [source] must be [" + expectedType + "], found value [" + name + "] type [" + name + "]"; } private static String compatibleTypes(DataType spatialDataType) { @@ -284,4 +285,10 @@ public abstract class BinarySpatialFunctionTestCase extends AbstractScalarFuncti } return count; } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // TODO: Functions inheriting from this superclass don't serialize the Source, and must be fixed. + return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashTests.java index fe62b6d7b4f1..338643858fb7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/HashTests.java @@ -53,9 +53,12 @@ public class HashTests extends AbstractScalarFunctionTestCase { "HashEvaluator[algorithm=Attribute[channel=0], input=Attribute[channel=1]]", DataType.KEYWORD, is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.security.NoSuchAlgorithmException: invalid MessageDigest not available") - .withFoldingException(InvalidArgumentException.class, "invalid algorithm for []: invalid MessageDigest not available"); + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line 1:1: java.security.NoSuchAlgorithmException: invalid MessageDigest not available") + .withFoldingException( + InvalidArgumentException.class, + "invalid algorithm for [source]: invalid MessageDigest not available" + ); })); return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, cases); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java index 2f1c2e7853c7..f4840aeae2a1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatTests.java @@ -117,8 +117,8 @@ public class RepeatTests extends AbstractScalarFunctionTestCase { "RepeatEvaluator[str=Attribute[channel=0], number=Attribute[channel=1]]", DataType.KEYWORD, nullValue() - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [" + number + "]") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [" + number + "]") .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]"); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java index dfc4db228f8e..bb27be3f67d3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceTests.java @@ -91,9 +91,9 @@ public class ReplaceTests extends AbstractScalarFunctionTestCase { "ReplaceEvaluator[str=Attribute[channel=0], regex=Attribute[channel=1], newStr=Attribute[channel=2]]", DataType.KEYWORD, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") .withWarning( - "Line -1:-1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^".replaceAll( + "Line 1:1: java.util.regex.PatternSyntaxException: Unclosed character class near index 0\n[\n^".replaceAll( "\n", System.lineSeparator() ) @@ -139,4 +139,10 @@ public class ReplaceTests extends AbstractScalarFunctionTestCase { protected Expression build(Source source, List args) { return new Replace(source, args.get(0), args.get(1), args.get(2)); } + + @Override + protected Expression serializeDeserializeExpression(Expression expression) { + // TODO: This function doesn't serialize the Source, and must be fixed. 
+ return expression; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceTests.java index 40c9b33609a3..2513a4d47781 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SpaceTests.java @@ -51,8 +51,8 @@ public class SpaceTests extends AbstractScalarFunctionTestCase { "SpaceEvaluator[number=Attribute[channel=0]]", DataType.KEYWORD, nullValue() - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [" + number + "]") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.IllegalArgumentException: Number parameter cannot be negative, found [" + number + "]") .withFoldingException(IllegalArgumentException.class, "Number parameter cannot be negative, found [" + number + "]"); })); @@ -64,9 +64,9 @@ public class SpaceTests extends AbstractScalarFunctionTestCase { "SpaceEvaluator[number=Attribute[channel=0]]", DataType.KEYWORD, nullValue() - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning( - "Line -1:-1: java.lang.IllegalArgumentException: Creating strings longer than [" + max + "] bytes is not supported" + "Line 1:1: java.lang.IllegalArgumentException: Creating strings longer than [" + max + "] bytes is not supported" ) .withFoldingException(IllegalArgumentException.class, "Creating strings longer than [" + max + "] bytes is not supported"); })); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index 62cb0bd868ee..40111410e95f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -88,8 +88,8 @@ public class AddTests extends AbstractScalarFunctionTestCase { "AddDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: Infinity") ), new TestCaseSupplier( List.of(DataType.DOUBLE, DataType.DOUBLE), @@ -101,8 +101,8 @@ public class AddTests extends AbstractScalarFunctionTestCase { "AddDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: -Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: -Infinity") ) ) ); @@ -162,8 +162,8 @@ public class AddTests extends AbstractScalarFunctionTestCase { return List.of(); } catch (ArithmeticException e) { return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: long overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: long overflow" ); } }; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java index 70536cd0036e..b28d8fc40282 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivTests.java @@ -68,8 +68,8 @@ public class DivTests extends AbstractScalarFunctionTestCase { return List.of(); } return List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: / by zero" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: / by zero" ); }, false @@ -134,8 +134,8 @@ public class DivTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.getSuppliersForNumericType(rhsType, 0, 0, true), evaluatorToString, (lhs, rhs) -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: / by zero" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: / by zero" ), suppliers, expected, @@ -154,8 +154,8 @@ public class DivTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.ZERO, true), List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: / by zero" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: / by zero" ), false ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java index 2ef1cbe89e6a..b2d65ed7f561 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModTests.java @@ -121,8 +121,8 @@ public class ModTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.getSuppliersForNumericType(rhsType, 0, 0, true), evaluatorToString, (lhs, rhs) -> List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: / by zero" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: / by zero" ), suppliers, expected, @@ -141,8 +141,8 @@ public class ModTests extends AbstractScalarFunctionTestCase { TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE), true), TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.ZERO, true), List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: / by zero" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: / by zero" ), false ) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java index 12b685eaeeb8..a2dc8379d1f4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulTests.java @@ -82,8 +82,8 @@ public class MulTests extends AbstractScalarFunctionTestCase { "MulDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: Infinity") ), new TestCaseSupplier( List.of(DataType.DOUBLE, DataType.DOUBLE), @@ -95,8 +95,8 @@ public class MulTests extends AbstractScalarFunctionTestCase { "MulDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: -Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: -Infinity") ) )); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index 15860d35539e..130656cf3ca5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -56,8 +56,8 @@ public class NegTests extends AbstractScalarFunctionTestCase { Integer.MIN_VALUE, Integer.MIN_VALUE, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: integer overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: integer overflow" ) ); TestCaseSupplier.forUnaryLong( @@ -78,8 +78,8 @@ public class NegTests extends AbstractScalarFunctionTestCase { Long.MIN_VALUE, Long.MIN_VALUE, List.of( - "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.ArithmeticException: long overflow" + "Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.", + "Line 1:1: java.lang.ArithmeticException: long overflow" ) ); TestCaseSupplier.forUnaryDouble( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java index 600db229e90a..54ef4eb7563d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubTests.java @@ -99,8 +99,8 @@ public class SubTests extends AbstractScalarFunctionTestCase { "SubDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: Infinity") ), new TestCaseSupplier( List.of(DataType.DOUBLE, DataType.DOUBLE), @@ -112,8 +112,8 @@ public class SubTests extends AbstractScalarFunctionTestCase { "SubDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", DataType.DOUBLE, equalTo(null) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.ArithmeticException: not a finite double number: -Infinity") + ).withWarning("Line 1:1: evaluation of [source] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line 1:1: java.lang.ArithmeticException: not a finite double number: -Infinity") ) ) ); From cb62b442a1422a231fcb51440192d579b77719b6 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 18 Feb 2025 12:31:32 +0100 Subject: [PATCH 04/29] Remove shard changes and add retention lease REST APIs (#122754) --- x-pack/plugin/ccr/build.gradle | 11 - .../xpack/ccr/rest/ShardChangesRestIT.java | 399 ------------------ .../java/org/elasticsearch/xpack/ccr/Ccr.java | 50 +-- .../ccr/rest/RestShardChangesAction.java | 366 ---------------- .../logsdb/seqno/RetentionLeaseRestIT.java | 375 ---------------- .../xpack/logsdb/LogsDBPlugin.java | 32 -- .../seqno/RestAddRetentionLeaseAction.java | 278 ------------ 7 files changed, 19 insertions(+), 1492 deletions(-) delete mode 100644 x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java delete mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java delete mode 100644 x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java delete mode 100644 
x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java diff --git a/x-pack/plugin/ccr/build.gradle b/x-pack/plugin/ccr/build.gradle index 765c70abee37..f4ade5b44187 100644 --- a/x-pack/plugin/ccr/build.gradle +++ b/x-pack/plugin/ccr/build.gradle @@ -1,6 +1,5 @@ apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' -apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { name = 'x-pack-ccr' description = 'Elasticsearch Expanded Pack Plugin - CCR' @@ -34,16 +33,6 @@ tasks.named('internalClusterTestTestingConventions').configure { baseClass 'org.elasticsearch.test.ESIntegTestCase' } -tasks.named("javaRestTest").configure { - usesDefaultDistribution() -} - -restResources { - restApi { - include 'bulk', 'search', '_common', 'indices', 'index', 'cluster', 'data_stream' - } -} - addQaCheckDependencies(project) dependencies { diff --git a/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java b/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java deleted file mode 100644 index 4c6190447509..000000000000 --- a/x-pack/plugin/ccr/src/javaRestTest/java/org/elasticsearch/xpack/ccr/rest/ShardChangesRestIT.java +++ /dev/null @@ -1,399 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ccr.rest; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.Build; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xcontent.json.JsonXContent; -import org.hamcrest.Matchers; -import org.junit.Before; -import org.junit.ClassRule; - -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -public class ShardChangesRestIT extends ESRestTestCase { - private static final String CCR_SHARD_CHANGES_ENDPOINT = "/%s/ccr/shard_changes"; - private static final String BULK_INDEX_ENDPOINT = "/%s/_bulk"; - private static final String DATA_STREAM_ENDPOINT = "/_data_stream/%s"; - private static final String INDEX_TEMPLATE_ENDPOINT = "/_index_template/%s"; - - private static final String[] SHARD_RESPONSE_FIELDS = new String[] { - "took_in_millis", - "operations", - "shard_id", - "index_abstraction", - "index", - "settings_version", - "max_seq_no_of_updates_or_deletes", - "number_of_operations", - "mapping_version", - "aliases_version", - "max_seq_no", - "global_checkpoint" }; - - private static final String BULK_INDEX_TEMPLATE = """ - { "index": { "op_type": "create" } } - { "@timestamp": "%s", "name": "%s" } - """;; - private static final String[] NAMES = { "skywalker", "leia", "obi-wan", "yoda", "chewbacca", "r2-d2", "c-3po", 
"darth-vader" }; - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - @Before - public void assumeSnapshotBuild() { - assumeTrue("/{index}/ccr/shard_changes endpoint only available in snapshot builds", Build.current().isSnapshot()); - } - - public void testShardChangesNoOperation() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") - .build() - ); - assertTrue(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - assertOK(client().performRequest(shardChangesRequest)); - } - - public void testShardChangesDefaultParams() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - final Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") - .build(); - final String mappings = """ - { - "properties": { - "name": { - "type": "keyword" - } - } - } - """; - createIndex(indexName, settings, mappings); - assertTrue(indexExists(indexName)); - - assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - final Response response = client().performRequest(shardChangesRequest); - assertOK(response); - assertShardChangesResponse( - 
XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), - indexName - ); - } - - public void testDataStreamShardChangesDefaultParams() throws IOException { - final String templateName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); - assertOK(createIndexTemplate(templateName, """ - { - "index_patterns": [ "test-*-*" ], - "data_stream": {}, - "priority": 100, - "template": { - "mappings": { - "properties": { - "@timestamp": { - "type": "date" - }, - "name": { - "type": "keyword" - } - } - } - } - }""")); - - final String dataStreamName = "test-" - + randomAlphanumericOfLength(5).toLowerCase(Locale.ROOT) - + "-" - + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); - assertOK(createDataStream(dataStreamName)); - - assertOK(bulkIndex(dataStreamName, randomIntBetween(10, 20))); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(dataStreamName)); - final Response response = client().performRequest(shardChangesRequest); - assertOK(response); - assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), - dataStreamName - ); - } - - public void testIndexAliasShardChangesDefaultParams() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - final String aliasName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); - final Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") - .build(); - final String mappings = """ - { - "properties": { - "name": { - "type": "keyword" - } - } - } - """; - createIndex(indexName, settings, mappings); - assertTrue(indexExists(indexName)); - - final Request putAliasRequest = new Request("PUT", "/" + indexName + "/_alias/" + aliasName); - 
assertOK(client().performRequest(putAliasRequest)); - - assertOK(bulkIndex(aliasName, randomIntBetween(10, 20))); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(aliasName)); - final Response response = client().performRequest(shardChangesRequest); - assertOK(response); - assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), - aliasName - ); - } - - public void testShardChangesWithAllParameters() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") - .build() - ); - assertTrue(indexExists(indexName)); - - assertOK(bulkIndex(indexName, randomIntBetween(100, 200))); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - shardChangesRequest.addParameter("from_seq_no", "0"); - shardChangesRequest.addParameter("max_operations_count", "1"); - shardChangesRequest.addParameter("poll_timeout", "10s"); - shardChangesRequest.addParameter("max_batch_size", "1MB"); - - final Response response = client().performRequest(shardChangesRequest); - assertOK(response); - assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false), - indexName - ); - } - - public void testShardChangesMultipleRequests() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), "1s") - .build() - ); - assertTrue(indexExists(indexName)); - - 
assertOK(bulkIndex(indexName, randomIntBetween(100, 200))); - - final Request firstRequest = new Request("GET", shardChangesEndpoint(indexName)); - firstRequest.addParameter("from_seq_no", "0"); - firstRequest.addParameter("max_operations_count", "10"); - firstRequest.addParameter("poll_timeout", "10s"); - firstRequest.addParameter("max_batch_size", "1MB"); - - final Response firstResponse = client().performRequest(firstRequest); - assertOK(firstResponse); - assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(firstResponse.getEntity()), false), - indexName - ); - - final Request secondRequest = new Request("GET", shardChangesEndpoint(indexName)); - secondRequest.addParameter("from_seq_no", "10"); - secondRequest.addParameter("max_operations_count", "10"); - secondRequest.addParameter("poll_timeout", "10s"); - secondRequest.addParameter("max_batch_size", "1MB"); - - final Response secondResponse = client().performRequest(secondRequest); - assertOK(secondResponse); - assertShardChangesResponse( - XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(secondResponse.getEntity()), false), - indexName - ); - } - - public void testShardChangesInvalidFromSeqNo() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex(indexName); - assertTrue(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - shardChangesRequest.addParameter("from_seq_no", "-1"); - final ResponseException ex = assertThrows(ResponseException.class, () -> client().performRequest(shardChangesRequest)); - assertResponseException(ex, RestStatus.BAD_REQUEST, "Validation Failed: 1: fromSeqNo [-1] cannot be lower than 0"); - } - - public void testShardChangesInvalidMaxOperationsCount() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - 
createIndex(indexName); - assertTrue(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - shardChangesRequest.addParameter("max_operations_count", "-1"); - final ResponseException ex = assertThrows(ResponseException.class, () -> client().performRequest(shardChangesRequest)); - assertResponseException(ex, RestStatus.BAD_REQUEST, "Validation Failed: 1: maxOperationCount [-1] cannot be lower than 0"); - } - - public void testShardChangesNegativePollTimeout() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex(indexName); - assertTrue(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - shardChangesRequest.addParameter("poll_timeout", "-1s"); - assertOK(client().performRequest(shardChangesRequest)); - } - - public void testShardChangesInvalidMaxBatchSize() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex(indexName); - assertTrue(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - shardChangesRequest.addParameter("max_batch_size", "-1MB"); - final ResponseException ex = assertThrows(ResponseException.class, () -> client().performRequest(shardChangesRequest)); - assertResponseException( - ex, - RestStatus.BAD_REQUEST, - "failed to parse setting [max_batch_size] with value [-1MB] as a size in bytes" - ); - } - - public void testShardChangesMissingIndex() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - assertFalse(indexExists(indexName)); - - final Request shardChangesRequest = new Request("GET", shardChangesEndpoint(indexName)); - final ResponseException ex = assertThrows(ResponseException.class, () -> client().performRequest(shardChangesRequest)); - assertResponseException(ex, 
RestStatus.BAD_REQUEST, "Failed to process shard changes for index [" + indexName + "]"); - } - - private static Response bulkIndex(final String indexName, int numberOfDocuments) throws IOException { - final StringBuilder sb = new StringBuilder(); - - long timestamp = System.currentTimeMillis(); - for (int i = 0; i < numberOfDocuments; i++) { - sb.append( - String.format( - Locale.ROOT, - BULK_INDEX_TEMPLATE, - Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME), - randomFrom(NAMES) - ) - ); - timestamp += 1000; // 1 second - } - - final Request request = new Request("POST", bulkEndpoint(indexName)); - request.setJsonEntity(sb.toString()); - request.addParameter("refresh", "true"); - return client().performRequest(request); - } - - private Response createDataStream(final String dataStreamName) throws IOException { - return client().performRequest(new Request("PUT", dataStreamEndpoint(dataStreamName))); - } - - private static Response createIndexTemplate(final String templateName, final String mappings) throws IOException { - final Request request = new Request("PUT", indexTemplateEndpoint(templateName)); - request.setJsonEntity(mappings); - return client().performRequest(request); - } - - private static String shardChangesEndpoint(final String indexName) { - return String.format(Locale.ROOT, CCR_SHARD_CHANGES_ENDPOINT, indexName); - } - - private static String bulkEndpoint(final String indexName) { - return String.format(Locale.ROOT, BULK_INDEX_ENDPOINT, indexName); - } - - private static String dataStreamEndpoint(final String dataStreamName) { - return String.format(Locale.ROOT, DATA_STREAM_ENDPOINT, dataStreamName); - } - - private static String indexTemplateEndpoint(final String templateName) { - return String.format(Locale.ROOT, INDEX_TEMPLATE_ENDPOINT, templateName); - } - - private void assertResponseException(final ResponseException ex, final RestStatus restStatus, final String error) { - 
assertEquals(restStatus.getStatus(), ex.getResponse().getStatusLine().getStatusCode()); - assertThat(ex.getMessage(), Matchers.containsString(error)); - } - - private void assertShardChangesResponse(final Map shardChangesResponseBody, final String indexAbstractionName) { - for (final String fieldName : SHARD_RESPONSE_FIELDS) { - final Object fieldValue = shardChangesResponseBody.get(fieldName); - assertNotNull("Field " + fieldName + " is missing or has a null value.", fieldValue); - - if ("index_abstraction".equals(fieldName)) { - assertEquals(indexAbstractionName, fieldValue); - } - - if ("operations".equals(fieldName)) { - if (fieldValue instanceof List operationsList) { - assertFalse("Field 'operations' is empty.", operationsList.isEmpty()); - - for (final Object operation : operationsList) { - assertNotNull("Operation is null.", operation); - if (operation instanceof Map operationMap) { - assertNotNull("seq_no is missing in operation.", operationMap.get("seq_no")); - assertNotNull("op_type is missing in operation.", operationMap.get("op_type")); - assertNotNull("primary_term is missing in operation.", operationMap.get("primary_term")); - } - } - } - } - } - } -} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 5305e179058b..87a4c2c7d482 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.ccr; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; @@ -92,7 +91,6 @@ import org.elasticsearch.xpack.ccr.rest.RestPutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestPutFollowAction; import 
org.elasticsearch.xpack.ccr.rest.RestResumeAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestResumeFollowAction; -import org.elasticsearch.xpack.ccr.rest.RestShardChangesAction; import org.elasticsearch.xpack.ccr.rest.RestUnfollowAction; import org.elasticsearch.xpack.core.XPackFeatureUsage; import org.elasticsearch.xpack.core.XPackField; @@ -114,7 +112,6 @@ import org.elasticsearch.xpack.core.ccr.action.ResumeFollowAction; import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -143,34 +140,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E public static final String REQUESTED_OPS_MISSING_METADATA_KEY = "es.requested_operations_missing"; public static final TransportVersion TRANSPORT_VERSION_ACTION_WITH_SHARD_ID = TransportVersions.V_8_9_X; - private static final List BASE_REST_HANDLERS = Arrays.asList( - // stats API - new RestFollowStatsAction(), - new RestCcrStatsAction(), - new RestFollowInfoAction(), - // follow APIs - new RestPutFollowAction(), - new RestResumeFollowAction(), - new RestPauseFollowAction(), - new RestUnfollowAction(), - // auto-follow APIs - new RestDeleteAutoFollowPatternAction(), - new RestPutAutoFollowPatternAction(), - new RestGetAutoFollowPatternAction(), - new RestPauseAutoFollowPatternAction(), - new RestResumeAutoFollowPatternAction(), - // forget follower API - new RestForgetFollowerAction() - ); - private static final List REST_HANDLERS = Collections.unmodifiableList(BASE_REST_HANDLERS); - - private static final List SNAPSHOT_BUILD_REST_HANDLERS; - static { - List snapshotBuildHandlers = new ArrayList<>(BASE_REST_HANDLERS); - snapshotBuildHandlers.add(new RestShardChangesAction()); - SNAPSHOT_BUILD_REST_HANDLERS = Collections.unmodifiableList(snapshotBuildHandlers); - } private final boolean 
enabled; private final Settings settings; private final CcrLicenseChecker ccrLicenseChecker; @@ -302,7 +272,25 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return emptyList(); } - return Build.current().isSnapshot() ? SNAPSHOT_BUILD_REST_HANDLERS : REST_HANDLERS; + return Arrays.asList( + // stats API + new RestFollowStatsAction(), + new RestCcrStatsAction(), + new RestFollowInfoAction(), + // follow APIs + new RestPutFollowAction(), + new RestResumeFollowAction(), + new RestPauseFollowAction(), + new RestUnfollowAction(), + // auto-follow APIs + new RestDeleteAutoFollowPatternAction(), + new RestPutAutoFollowPatternAction(), + new RestGetAutoFollowPatternAction(), + new RestPauseAutoFollowPatternAction(), + new RestResumeAutoFollowPatternAction(), + // forget follower API + new RestForgetFollowerAction() + ); } public List getNamedWriteables() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java deleted file mode 100644 index 0bbcfab97eb3..000000000000 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestShardChangesAction.java +++ /dev/null @@ -1,366 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ccr.rest; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.translog.Translog; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.RestUtils; -import org.elasticsearch.rest.action.RestActionListener; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.ccr.Ccr; -import org.elasticsearch.xpack.ccr.action.ShardChangesAction; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.function.Supplier; - -import static org.elasticsearch.rest.RestRequest.Method.GET; - -/** - * A REST handler that retrieves shard changes in a specific index, data stream or alias whose name is - * provided as a parameter. 
It handles GET requests to the "/{index}/ccr/shard_changes" endpoint retrieving - * shard-level changes, such as Translog operations, mapping version, settings version, aliases version, - * the global checkpoint, maximum sequence number and maximum sequence number of updates or deletes. - *

- * In the case of a data stream, the first backing index is considered the target for retrieving shard changes. - * In the case of an alias, the first index that the alias points to is considered the target for retrieving - * shard changes. - *

- * Note: This handler is only available for snapshot builds. - */ -public class RestShardChangesAction extends BaseRestHandler { - - private static final long DEFAULT_FROM_SEQ_NO = 0L; - private static final ByteSizeValue DEFAULT_MAX_BATCH_SIZE = ByteSizeValue.of(32, ByteSizeUnit.MB); - private static final TimeValue DEFAULT_POLL_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES); - private static final int DEFAULT_MAX_OPERATIONS_COUNT = 1024; - private static final int DEFAULT_TIMEOUT_SECONDS = 60; - private static final TimeValue GET_INDEX_UUID_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - private static final TimeValue SHARD_STATS_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - private static final String INDEX_PARAM_NAME = "index"; - private static final String FROM_SEQ_NO_PARAM_NAME = "from_seq_no"; - private static final String MAX_BATCH_SIZE_PARAM_NAME = "max_batch_size"; - private static final String POLL_TIMEOUT_PARAM_NAME = "poll_timeout"; - private static final String MAX_OPERATIONS_COUNT_PARAM_NAME = "max_operations_count"; - - @Override - public String getName() { - return "ccr_shard_changes_action"; - } - - @Override - public List routes() { - return List.of(new Route(GET, "/{index}/ccr/shard_changes")); - } - - /** - * Prepares the request for retrieving shard changes. - * - * @param restRequest The REST request. - * @param client The NodeClient for executing the request. - * @return A RestChannelConsumer for handling the request. - * @throws IOException If an error occurs while preparing the request. 
- */ - @Override - protected RestChannelConsumer prepareRequest(final RestRequest restRequest, final NodeClient client) throws IOException { - final var indexAbstractionName = restRequest.param(INDEX_PARAM_NAME); - final var fromSeqNo = restRequest.paramAsLong(FROM_SEQ_NO_PARAM_NAME, DEFAULT_FROM_SEQ_NO); - final var maxBatchSize = restRequest.paramAsSize(MAX_BATCH_SIZE_PARAM_NAME, DEFAULT_MAX_BATCH_SIZE); - final var pollTimeout = restRequest.paramAsTime(POLL_TIMEOUT_PARAM_NAME, DEFAULT_POLL_TIMEOUT); - final var maxOperationsCount = restRequest.paramAsInt(MAX_OPERATIONS_COUNT_PARAM_NAME, DEFAULT_MAX_OPERATIONS_COUNT); - - // NOTE: we first retrieve the concrete index name in case we are dealing with an alias or data stream. - // Then we use the concrete index name to retrieve the index UUID and shard stats. - final CompletableFuture indexNameCompletableFuture = asyncGetIndexName( - client, - indexAbstractionName, - client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME) - ); - final CompletableFuture indexUUIDCompletableFuture = indexNameCompletableFuture.thenCompose( - concreteIndexName -> asyncGetIndexUUID( - client, - concreteIndexName, - client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME), - RestUtils.getMasterNodeTimeout(restRequest) - ) - ); - final CompletableFuture shardStatsCompletableFuture = indexNameCompletableFuture.thenCompose( - concreteIndexName -> asyncShardStats(client, concreteIndexName, client.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)) - ); - - return channel -> CompletableFuture.allOf(indexUUIDCompletableFuture, shardStatsCompletableFuture).thenRun(() -> { - try { - final String concreteIndexName = indexNameCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - final String indexUUID = indexUUIDCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - final ShardStats shardStats = shardStatsCompletableFuture.get(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - final ShardId shardId = 
shardStats.getShardRouting().shardId(); - final String expectedHistoryUUID = shardStats.getCommitStats().getUserData().get(Engine.HISTORY_UUID_KEY); - - final ShardChangesAction.Request shardChangesRequest = shardChangesRequest( - concreteIndexName, - indexUUID, - shardId, - expectedHistoryUUID, - fromSeqNo, - maxBatchSize, - pollTimeout, - maxOperationsCount - ); - client.execute(ShardChangesAction.INSTANCE, shardChangesRequest, new RestActionListener<>(channel) { - @Override - protected void processResponse(final ShardChangesAction.Response response) { - channel.sendResponse( - new RestResponse( - RestStatus.OK, - shardChangesResponseToXContent(response, indexAbstractionName, concreteIndexName, shardId) - ) - ); - } - }); - - } catch (InterruptedException | ExecutionException e) { - Thread.currentThread().interrupt(); - throw new IllegalStateException("Error while retrieving shard changes", e); - } catch (TimeoutException te) { - throw new IllegalStateException("Timeout while waiting for shard stats or index UUID", te); - } - }).exceptionally(ex -> { - channel.sendResponse( - new RestResponse( - RestStatus.BAD_REQUEST, - "Failed to process shard changes for index [" + indexAbstractionName + "] " + ex.getMessage() - ) - ); - return null; - }); - } - - /** - * Creates a ShardChangesAction.Request object with the provided parameters. - * - * @param indexName The name of the index for which to retrieve shard changes. - * @param indexUUID The UUID of the index. - * @param shardId The ShardId for which to retrieve shard changes. - * @param expectedHistoryUUID The expected history UUID of the shard. - * @param fromSeqNo The sequence number from which to start retrieving shard changes. - * @param maxBatchSize The maximum size of a batch of operations to retrieve. - * @param pollTimeout The maximum time to wait for shard changes. - * @param maxOperationsCount The maximum number of operations to retrieve in a single request. 
- * @return A ShardChangesAction.Request object with the provided parameters. - */ - private static ShardChangesAction.Request shardChangesRequest( - final String indexName, - final String indexUUID, - final ShardId shardId, - final String expectedHistoryUUID, - long fromSeqNo, - final ByteSizeValue maxBatchSize, - final TimeValue pollTimeout, - int maxOperationsCount - ) { - final ShardChangesAction.Request shardChangesRequest = new ShardChangesAction.Request( - new ShardId(new Index(indexName, indexUUID), shardId.id()), - expectedHistoryUUID - ); - shardChangesRequest.setFromSeqNo(fromSeqNo); - shardChangesRequest.setMaxBatchSize(maxBatchSize); - shardChangesRequest.setPollTimeout(pollTimeout); - shardChangesRequest.setMaxOperationCount(maxOperationsCount); - return shardChangesRequest; - } - - /** - * Converts the response to XContent JSOn format. - * - * @param response The ShardChangesAction response. - * @param indexAbstractionName The name of the index abstraction. - * @param concreteIndexName The name of the index. - * @param shardId The ShardId. 
- */ - private static XContentBuilder shardChangesResponseToXContent( - final ShardChangesAction.Response response, - final String indexAbstractionName, - final String concreteIndexName, - final ShardId shardId - ) { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - builder.startObject(); - builder.field("index_abstraction", indexAbstractionName); - builder.field("index", concreteIndexName); - builder.field("shard_id", shardId); - builder.field("mapping_version", response.getMappingVersion()); - builder.field("settings_version", response.getSettingsVersion()); - builder.field("aliases_version", response.getAliasesVersion()); - builder.field("global_checkpoint", response.getGlobalCheckpoint()); - builder.field("max_seq_no", response.getMaxSeqNo()); - builder.field("max_seq_no_of_updates_or_deletes", response.getMaxSeqNoOfUpdatesOrDeletes()); - builder.field("took_in_millis", response.getTookInMillis()); - if (response.getOperations() != null && response.getOperations().length > 0) { - operationsToXContent(response, builder); - } - builder.endObject(); - - return builder; - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - /** - * Converts the operations from a ShardChangesAction response to XContent JSON format. - * - * @param response The ShardChangesAction response containing the operations to be converted. - * @param builder The XContentBuilder to which the converted operations will be added. - * @throws IOException If an error occurs while writing to the XContentBuilder. 
- */ - private static void operationsToXContent(final ShardChangesAction.Response response, final XContentBuilder builder) throws IOException { - builder.field("number_of_operations", response.getOperations().length); - builder.field("operations"); - builder.startArray(); - for (final Translog.Operation operation : response.getOperations()) { - builder.startObject(); - builder.field("op_type", operation.opType()); - builder.field("seq_no", operation.seqNo()); - builder.field("primary_term", operation.primaryTerm()); - builder.endObject(); - } - builder.endArray(); - } - - /** - * Execute an asynchronous task using a task supplier and an executor service. - * - * @param The type of data to be retrieved. - * @param task The supplier task that provides the data. - * @param executorService The executorService service for executing the asynchronous task. - * @param errorMessage The error message to be thrown if the task execution fails. - * @return A CompletableFuture that completes with the retrieved data. - */ - private static CompletableFuture supplyAsyncTask( - final Supplier task, - final ExecutorService executorService, - final String errorMessage - ) { - return CompletableFuture.supplyAsync(() -> { - try { - return task.get(); - } catch (Exception e) { - throw new ElasticsearchException(errorMessage, e); - } - }, executorService); - } - - /** - * Asynchronously retrieves the index name for a given index, alias or data stream. - * If the name represents a data stream, the name of the first backing index is returned. - * If the name represents an alias, the name of the first index that the alias points to is returned. - * - * @param client The NodeClient for executing the asynchronous request. - * @param indexAbstractionName The name of the index, alias or data stream. - * @return A CompletableFuture that completes with the retrieved index name. 
- */ - private static CompletableFuture asyncGetIndexName( - final NodeClient client, - final String indexAbstractionName, - final ExecutorService executorService - ) { - return supplyAsyncTask(() -> { - final ClusterState clusterState = client.admin() - .cluster() - .prepareState(new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)) - .get(GET_INDEX_UUID_TIMEOUT) - .getState(); - final IndexAbstraction indexAbstraction = clusterState.metadata().getIndicesLookup().get(indexAbstractionName); - if (indexAbstraction == null) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Invalid index or data stream name [%s]", indexAbstractionName) - ); - } - if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM - || indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) { - return indexAbstraction.getIndices().getFirst().getName(); - } - return indexAbstractionName; - }, executorService, "Error while retrieving index name for index or data stream [" + indexAbstractionName + "]"); - } - - /** - * Asynchronously retrieves the shard stats for a given index using an executor service. - * - * @param client The NodeClient for executing the asynchronous request. - * @param concreteIndexName The name of the index for which to retrieve shard statistics. - * @param executorService The executorService service for executing the asynchronous task. - * @return A CompletableFuture that completes with the retrieved ShardStats. - * @throws ElasticsearchException If an error occurs while retrieving shard statistics. 
- */ - private static CompletableFuture asyncShardStats( - final NodeClient client, - final String concreteIndexName, - final ExecutorService executorService - ) { - return supplyAsyncTask( - () -> Arrays.stream(client.admin().indices().prepareStats(concreteIndexName).clear().get(SHARD_STATS_TIMEOUT).getShards()) - .max(Comparator.comparingLong(shardStats -> shardStats.getCommitStats().getGeneration())) - .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats for index: " + concreteIndexName)), - executorService, - "Error while retrieving shard stats for index [" + concreteIndexName + "]" - ); - } - - /** - * Asynchronously retrieves the index UUID for a given index using an executor service. - * - * @param client The NodeClient for executing the asynchronous request. - * @param concreteIndexName The name of the index for which to retrieve the index UUID. - * @param executorService The executorService service for executing the asynchronous task. - * @param masterTimeout The timeout for waiting until the cluster is unblocked. - * @return A CompletableFuture that completes with the retrieved index UUID. - * @throws ElasticsearchException If an error occurs while retrieving the index UUID. 
- */ - private static CompletableFuture asyncGetIndexUUID( - final NodeClient client, - final String concreteIndexName, - final ExecutorService executorService, - TimeValue masterTimeout - ) { - return supplyAsyncTask( - () -> client.admin() - .indices() - .prepareGetIndex(masterTimeout) - .setIndices(concreteIndexName) - .get(GET_INDEX_UUID_TIMEOUT) - .getSetting(concreteIndexName, IndexMetadata.SETTING_INDEX_UUID), - executorService, - "Error while retrieving index UUID for index [" + concreteIndexName + "]" - ); - } -} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java deleted file mode 100644 index fa2d92a8fdb8..000000000000 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java +++ /dev/null @@ -1,375 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.logsdb.seqno; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.Build; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xcontent.json.JsonXContent; -import org.junit.Before; -import org.junit.ClassRule; - -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -public class RetentionLeaseRestIT extends ESRestTestCase { - private static final String ADD_RETENTION_LEASE_ENDPOINT = "/%s/seq_no/add_retention_lease"; - private static final String BULK_INDEX_ENDPOINT = "/%s/_bulk"; - private static final String[] DOCUMENT_NAMES = { "alpha", "beta", "gamma", "delta" }; - - @Before - public void assumeSnapshotBuild() { - assumeTrue("/{index}/seq_no/add_retention_lease endpoint only available in snapshot builds", Build.current().isSnapshot()); - } - - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") - .build(); - - @Override - protected String getTestRestCluster() { - return cluster.getHttpAddresses(); - } - - public void testAddRetentionLeaseSuccessfully() throws IOException { - final String indexName = 
randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() - ); - assertTrue(indexExists(indexName)); - - assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); - - final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); - final String retentionLeaseId = randomAlphaOfLength(6); - final String retentionLeaseSource = randomAlphaOfLength(8); - retentionLeaseRequest.addParameter("id", retentionLeaseId); - retentionLeaseRequest.addParameter("source", retentionLeaseSource); - - final Response response = client().performRequest(retentionLeaseRequest); - assertOK(response); - - assertRetentionLeaseResponseContent(response, indexName, indexName, retentionLeaseId, retentionLeaseSource); - assertRetentionLeaseExists(indexName, retentionLeaseId, retentionLeaseSource); - } - - public void testAddRetentionLeaseWithoutIdAndSource() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() - ); - assertTrue(indexExists(indexName)); - - assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); - - final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); - - final Response response = client().performRequest(retentionLeaseRequest); - assertOK(response); - - assertRetentionLeaseResponseContent(response, indexName, indexName, null, null); - } - - public void testAddRetentionLeaseToDataStream() throws IOException { - final String templateName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); - assertOK(createIndexTemplate(templateName, """ - { - "index_patterns": [ "test-*-*" ], - 
"data_stream": {}, - "priority": 100, - "template": { - "settings": { - "number_of_shards": 1, - "number_of_replicas": 0 - }, - "mappings": { - "properties": { - "@timestamp": { - "type": "date" - }, - "name": { - "type": "keyword" - } - } - } - } - } - """)); - - final String dataStreamName = "test-" - + randomAlphanumericOfLength(5).toLowerCase(Locale.ROOT) - + "-" - + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); - assertOK(createDataStream(dataStreamName)); - assertOK(bulkIndex(dataStreamName, randomIntBetween(10, 20))); - - final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, dataStreamName)); - final String retentionLeaseId = randomAlphaOfLength(6); - final String retentionLeaseSource = randomAlphaOfLength(8); - retentionLeaseRequest.addParameter("id", retentionLeaseId); - retentionLeaseRequest.addParameter("source", retentionLeaseSource); - - final Response response = client().performRequest(retentionLeaseRequest); - assertOK(response); - - final String dataStreamBackingIndex = getFirstBackingIndex(dataStreamName); - assertRetentionLeaseResponseContent(response, dataStreamName, dataStreamBackingIndex, retentionLeaseId, retentionLeaseSource); - assertRetentionLeaseExists(dataStreamBackingIndex, retentionLeaseId, retentionLeaseSource); - } - - public void testAddRetentionLeaseUsingAlias() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() - ); - assertTrue(indexExists(indexName)); - - final String aliasName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); - final Request putAliasRequest = new Request("PUT", "/" + indexName + "/_alias/" + aliasName); - assertOK(client().performRequest(putAliasRequest)); - - assertOK(bulkIndex(aliasName, randomIntBetween(10, 20))); - - final Request 
retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, aliasName)); - final String retentionLeaseId = randomAlphaOfLength(6); - final String retentionLeaseSource = randomAlphaOfLength(8); - retentionLeaseRequest.addParameter("id", retentionLeaseId); - retentionLeaseRequest.addParameter("source", retentionLeaseSource); - - final Response response = client().performRequest(retentionLeaseRequest); - assertOK(response); - - assertRetentionLeaseResponseContent(response, aliasName, indexName, retentionLeaseId, retentionLeaseSource); - assertRetentionLeaseExists(indexName, retentionLeaseId, retentionLeaseSource); - } - - public void testAddRetentionLeaseMissingIndex() throws IOException { - final String missingIndexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - assertFalse(indexExists(missingIndexName)); - - final Request retentionLeaseRequest = new Request( - "PUT", - String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, missingIndexName) - ); - final ResponseException exception = assertThrows(ResponseException.class, () -> client().performRequest(retentionLeaseRequest)); - assertResponseException(exception, RestStatus.BAD_REQUEST, "Error adding retention lease for [" + missingIndexName + "]"); - } - - public void testAddRetentionLeaseInvalidParameters() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() - ); - assertTrue(indexExists(indexName)); - assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); - - final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); - retentionLeaseRequest.addParameter("id", null); - retentionLeaseRequest.addParameter("source", randomBoolean() ? 
UUIDs.randomBase64UUID() : "test-source"); - - final ResponseException exception = assertThrows(ResponseException.class, () -> client().performRequest(retentionLeaseRequest)); - assertResponseException(exception, RestStatus.BAD_REQUEST, "retention lease ID can not be empty"); - } - - public void testAddMultipleRetentionLeasesForSameShard() throws IOException { - final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); - createIndex( - indexName, - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() - ); - assertTrue(indexExists(indexName)); - assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); - - int numberOfLeases = randomIntBetween(2, 5); - for (int i = 0; i < numberOfLeases; i++) { - final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); - retentionLeaseRequest.addParameter("id", "lease-" + i); - retentionLeaseRequest.addParameter("source", "test-source-" + i); - - final Response response = client().performRequest(retentionLeaseRequest); - assertOK(response); - - assertRetentionLeaseResponseContent(response, indexName, indexName, "lease-" + i, "test-source-" + i); - } - - for (int i = 0; i < numberOfLeases; i++) { - assertRetentionLeaseExists(indexName, "lease-" + i, "test-source-" + i); - } - } - - private static Response bulkIndex(final String indexName, int numberOfDocuments) throws IOException { - final StringBuilder sb = new StringBuilder(); - long timestamp = System.currentTimeMillis(); - - for (int i = 0; i < numberOfDocuments; i++) { - sb.append( - String.format( - Locale.ROOT, - "{ \"index\": {} }\n{ \"@timestamp\": \"%s\", \"name\": \"%s\" }\n", - Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME), - randomFrom(DOCUMENT_NAMES) - ) - ); - timestamp += 1000; - } - - final Request request = new Request("POST", 
String.format(Locale.ROOT, BULK_INDEX_ENDPOINT, indexName)); - request.setJsonEntity(sb.toString()); - request.addParameter("refresh", "true"); - return client().performRequest(request); - } - - private void assertResponseException(final ResponseException exception, final RestStatus expectedStatus, final String expectedMessage) { - assertEquals(expectedStatus.getStatus(), exception.getResponse().getStatusLine().getStatusCode()); - assertTrue(exception.getMessage().contains(expectedMessage)); - } - - private Map getRetentionLeases(final String indexName) throws IOException { - final Request statsRequest = new Request("GET", "/" + indexName + "/_stats"); - statsRequest.addParameter("level", "shards"); - - final Response response = client().performRequest(statsRequest); - assertOK(response); - - final Map responseMap = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(response.getEntity()), - false - ); - - @SuppressWarnings("unchecked") - final Map indices = (Map) responseMap.get("indices"); - if (indices == null || indices.containsKey(indexName) == false) { - throw new IllegalArgumentException("No shard stats found for: " + indexName); - } - - @SuppressWarnings("unchecked") - final Map shards = (Map) ((Map) indices.get(indexName)).get("shards"); - - @SuppressWarnings("unchecked") - final List> shardList = (List>) shards.get("0"); - - return getRetentionLeases(indexName, shardList); - } - - private static Map getRetentionLeases(final String indexName, final List> shardList) { - final Map shardStats = shardList.getFirst(); - - @SuppressWarnings("unchecked") - final Map retentionLeases = (Map) shardStats.get("retention_leases"); - if (retentionLeases == null) { - throw new IllegalArgumentException("No retention leases found for shard 0 of index: " + indexName); - } - return retentionLeases; - } - - private void assertRetentionLeaseExists( - final String indexAbstractionName, - final String expectedRetentionLeaseId, - final String 
expectedRetentionLeaseSource - ) throws IOException { - final Map retentionLeases = getRetentionLeases(indexAbstractionName); - - @SuppressWarnings("unchecked") - final List> leases = (List>) retentionLeases.get("leases"); - - boolean retentionLeaseExists = leases.stream().anyMatch(lease -> { - final String id = (String) lease.get("id"); - final String source = (String) lease.get("source"); - return expectedRetentionLeaseId.equals(id) && expectedRetentionLeaseSource.equals(source); - }); - - assertTrue( - "Retention lease with ID [" + expectedRetentionLeaseId + "] and source [" + expectedRetentionLeaseSource + "] does not exist.", - retentionLeaseExists - ); - } - - private Response createDataStream(final String dataStreamName) throws IOException { - return client().performRequest(new Request("PUT", "/_data_stream/" + dataStreamName)); - } - - private String getFirstBackingIndex(final String dataStreamName) throws IOException { - final Response response = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); - - final Map responseMap = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(response.getEntity()), - false - ); - - @SuppressWarnings("unchecked") - final List> dataStreams = (List>) responseMap.get("data_streams"); - - if (dataStreams == null || dataStreams.isEmpty()) { - throw new IllegalArgumentException("No data stream found for name: " + dataStreamName); - } - - @SuppressWarnings("unchecked") - final List> backingIndices = (List>) dataStreams.get(0).get("indices"); - - if (backingIndices == null || backingIndices.isEmpty()) { - throw new IllegalArgumentException("No backing indices found for data stream: " + dataStreamName); - } - - return (String) backingIndices.getFirst().get("index_name"); - } - - private static Response createIndexTemplate(final String templateName, final String mappings) throws IOException { - final Request request = new Request("PUT", "/_index_template/" + templateName); - 
request.setJsonEntity(mappings); - return client().performRequest(request); - } - - private void assertRetentionLeaseResponseContent( - final Response response, - final String expectedIndexAbstraction, - final String expectedConcreteIndex, - final String expectedLeaseId, - final String expectedLeaseSource - ) throws IOException { - final Map responseBody = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(response.getEntity()), - false - ); - - assertEquals("Unexpected index abstraction in response", expectedIndexAbstraction, responseBody.get("index_abstraction")); - assertEquals("Unexpected concrete index in response", expectedConcreteIndex, responseBody.get("index")); - assertNotNull("Shard ID missing in response", responseBody.get("shard_id")); - - if (expectedLeaseId != null) { - assertEquals("Unexpected lease ID in response", expectedLeaseId, responseBody.get("id")); - } - if (expectedLeaseSource != null) { - assertEquals("Unexpected lease source in response", expectedLeaseSource, responseBody.get("source")); - } - } -} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 20f114389fe4..1fb46f74a09b 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -7,38 +7,24 @@ package org.elasticsearch.xpack.logsdb; -import org.elasticsearch.Build; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.settings.ClusterSettings; -import 
org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.rest.RestHandler; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; -import org.elasticsearch.xpack.logsdb.seqno.RestAddRetentionLeaseAction; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; -import java.util.function.Predicate; -import java.util.function.Supplier; import static org.elasticsearch.xpack.logsdb.LogsdbLicenseService.FALLBACK_SETTING; @@ -113,24 +99,6 @@ public class LogsDBPlugin extends Plugin implements ActionPlugin { return actions; } - @Override - public Collection getRestHandlers( - Settings settings, - NamedWriteableRegistry namedWriteableRegistry, - RestController restController, - ClusterSettings clusterSettings, - IndexScopedSettings indexScopedSettings, - SettingsFilter settingsFilter, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier nodesInCluster, - Predicate clusterSupportsFeature - ) { - if (Build.current().isSnapshot()) { - return List.of(new RestAddRetentionLeaseAction()); - } - return Collections.emptyList(); - } - protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } diff --git 
a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java deleted file mode 100644 index 1370b18a431b..000000000000 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java +++ /dev/null @@ -1,278 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.logsdb.seqno; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexAbstraction; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.seqno.RetentionLeaseActions; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.RestActionListener; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Locale; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; - -import 
static org.elasticsearch.rest.RestRequest.Method.PUT; - -/** - * This class implements a REST API for adding a retention lease to a shard in Elasticsearch. - * Retention leases ensure that specific sequence numbers are retained, even as the global checkpoint - * advances during indexing. This guarantees seq_no values availability until the retention lease is - * removed. - * - * The API supports adding retention leases to indices, data streams, and index aliases. For data streams - * or aliases, the first backing index or underlying index is identified, and the retention lease is added - * to its shard. - * - * **Note:** This REST API is available only in Elasticsearch snapshot builds and is intended solely - * for benchmarking purposes, such as benchmarking operations like the shard changes API in Rally tracks. - * It is not intended for use in production environments. - * - * The response provides details about the added retention lease, including the target index, - * shard ID, retention lease ID, and source. - */ -public class RestAddRetentionLeaseAction extends BaseRestHandler { - - private static final int DEFAULT_TIMEOUT_SECONDS = 60; - private static final TimeValue SHARD_STATS_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - private static final TimeValue GET_INDEX_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); - private static final String INDEX_PARAM = "index"; - private static final String ID_PARAM = "id"; - private static final String SOURCE_PARAM = "source"; - - @Override - public String getName() { - return "add_retention_lease_action"; - } - - @Override - public List routes() { - return List.of(new Route(PUT, "/{index}/seq_no/add_retention_lease")); - } - - /** - * Prepare a request to add a retention lease. When the target is an alias or data stream we just - * get the first shard of the first index using the shard stats api. 
- * - * @param request the request to execute - * @param client The NodeClient for executing the request. - * @return A RestChannelConsumer for handling the request. - * @throws IOException If an error occurs while preparing the request. - */ - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final String indexAbstractionName = request.param(INDEX_PARAM); - final String retentionLeaseId = request.param(ID_PARAM, UUIDs.randomBase64UUID()); - final String retentionLeaseSource = request.param(SOURCE_PARAM, UUIDs.randomBase64UUID()); - - return channel -> asyncGetIndexName(client, indexAbstractionName, client.threadPool().executor(ThreadPool.Names.GENERIC)) - .thenCompose( - concreteIndexName -> asyncShardStats(client, concreteIndexName, client.threadPool().executor(ThreadPool.Names.GENERIC)) - .thenCompose( - shardStats -> addRetentionLease( - channel, - client, - indexAbstractionName, - concreteIndexName, - shardStats, - retentionLeaseId, - retentionLeaseSource - ) - ) - ) - .exceptionally(ex -> { - final String message = ex.getCause() != null ? 
ex.getCause().getMessage() : ex.getMessage(); - channel.sendResponse( - new RestResponse(RestStatus.BAD_REQUEST, "Error adding retention lease for [" + indexAbstractionName + "]: " + message) - ); - return null; - }); - } - - private static XContentBuilder addRetentionLeaseResponseToXContent( - final String indexAbstractionName, - final String concreteIndexName, - final ShardId shardId, - final String retentionLeaseId, - final String retentionLeaseSource - ) { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - builder.startObject(); - builder.field("index_abstraction", indexAbstractionName); - builder.field("index", concreteIndexName); - builder.field("shard_id", shardId); - builder.field("id", retentionLeaseId); - builder.field("source", retentionLeaseSource); - builder.endObject(); - - return builder; - } catch (IOException e) { - throw new RuntimeException(e); - } - } - - /** - * Adds a retention lease to a specific shard in an index, data stream, or alias. - * This operation is asynchronous and sends the response back to the client through the provided {@link RestChannel}. - * - * @param channel The {@link RestChannel} used to send the response back to the client. - * @param client The {@link NodeClient} used to execute the retention lease addition request. - * @param indexAbstractionName The name of the index, data stream, or alias for which the retention lease is being added. - * @param shardStats The {@link ShardStats} of the target shard where the retention lease will be added. - * @param retentionLeaseId A unique identifier for the retention lease being added. This identifies the lease in future operations. - * @param retentionLeaseSource A description or source of the retention lease request, often used for auditing or tracing purposes. - * @return A {@link CompletableFuture} that completes when the operation finishes. If the operation succeeds, the future completes - * successfully with {@code null}. 
If an error occurs, the future completes exceptionally with the corresponding exception. - * @throws ElasticsearchException If the request fails or encounters an unexpected error. - */ - private CompletableFuture addRetentionLease( - final RestChannel channel, - final NodeClient client, - final String indexAbstractionName, - final String concreteIndexName, - final ShardStats shardStats, - final String retentionLeaseId, - final String retentionLeaseSource - ) { - final CompletableFuture completableFuture = new CompletableFuture<>(); - try { - final ShardId shardId = shardStats.getShardRouting().shardId(); - final RetentionLeaseActions.AddRequest addRetentionLeaseRequest = new RetentionLeaseActions.AddRequest( - shardId, - retentionLeaseId, - RetentionLeaseActions.RETAIN_ALL, - retentionLeaseSource - ); - - client.execute(RetentionLeaseActions.ADD, addRetentionLeaseRequest, new RestActionListener<>(channel) { - - @Override - protected void processResponse(final ActionResponse.Empty empty) { - completableFuture.complete(null); - channel.sendResponse( - new RestResponse( - RestStatus.OK, - addRetentionLeaseResponseToXContent( - indexAbstractionName, - concreteIndexName, - shardId, - retentionLeaseId, - retentionLeaseSource - ) - ) - ); - } - }); - } catch (Exception e) { - completableFuture.completeExceptionally( - new ElasticsearchException("Failed to add retention lease for [" + indexAbstractionName + "]", e) - ); - } - return completableFuture; - } - - /** - * Execute an asynchronous task using a task supplier and an executor service. - * - * @param The type of data to be retrieved. - * @param task The supplier task that provides the data. - * @param executorService The {@link ExecutorService} for executing the asynchronous task. - * @param errorMessage The error message to be thrown if the task execution fails. - * @return A {@link CompletableFuture} that completes with the retrieved data. 
- */ - private static CompletableFuture supplyAsyncTask( - final Supplier task, - final ExecutorService executorService, - final String errorMessage - ) { - return CompletableFuture.supplyAsync(() -> { - try { - return task.get(); - } catch (Exception e) { - throw new ElasticsearchException(errorMessage, e); - } - }, executorService); - } - - /** - * Asynchronously retrieves the index name for a given index, alias or data stream. - * If the name represents a data stream, the name of the first backing index is returned. - * If the name represents an alias, the name of the first index that the alias points to is returned. - * - * @param client The {@link NodeClient} for executing the asynchronous request. - * @param indexAbstractionName The name of the index, alias or data stream. - * @return A {@link CompletableFuture} that completes with the retrieved index name. - */ - private static CompletableFuture asyncGetIndexName( - final NodeClient client, - final String indexAbstractionName, - final ExecutorService executorService - ) { - return supplyAsyncTask(() -> { - final ClusterState clusterState = client.admin() - .cluster() - .prepareState(new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)) - .get(GET_INDEX_TIMEOUT) - .getState(); - final IndexAbstraction indexAbstraction = clusterState.metadata().getIndicesLookup().get(indexAbstractionName); - if (indexAbstraction == null) { - throw new IllegalArgumentException( - String.format(Locale.ROOT, "Invalid index or data stream name [%s]", indexAbstractionName) - ); - } - if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM - || indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) { - return indexAbstraction.getIndices().getFirst().getName(); - } - return indexAbstractionName; - }, executorService, "Error while retrieving index name for or data stream [" + indexAbstractionName + "]"); - } - - /** - * Asynchronously retrieves the shard stats for a given index using an executor service. 
- * - * @param client The {@link NodeClient} for executing the asynchronous request. - * @param concreteIndexName The name of the index for which to retrieve shard statistics. - * @param executorService The {@link ExecutorService} for executing the asynchronous task. - * @return A {@link CompletableFuture} that completes with the retrieved ShardStats. - * @throws ElasticsearchException If an error occurs while retrieving shard statistics. - */ - private static CompletableFuture asyncShardStats( - final NodeClient client, - final String concreteIndexName, - final ExecutorService executorService - ) { - return supplyAsyncTask( - () -> Arrays.stream(client.admin().indices().prepareStats(concreteIndexName).clear().get(SHARD_STATS_TIMEOUT).getShards()) - .max(Comparator.comparingLong(shardStats -> shardStats.getCommitStats().getGeneration())) - .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats for: " + concreteIndexName)), - executorService, - "Error while retrieving shard stats for [" + concreteIndexName + "]" - ); - } -} From 7f7967bcfab6b5b88abef80efa1e8b9a4a3bad89 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Tue, 18 Feb 2025 12:32:20 +0100 Subject: [PATCH 05/29] Remove `TransportDownsampleIndexerAction` (#122756) --- .../xpack/downsample/Downsample.java | 2 - .../xpack/downsample/DownsampleMetrics.java | 2 - .../downsample/TransportDownsampleAction.java | 2 +- .../TransportDownsampleIndexerAction.java | 201 ------------------ .../xpack/security/operator/Constants.java | 1 - 5 files changed, 1 insertion(+), 207 deletions(-) delete mode 100644 x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java index 
7dcda9c2b003..84b0ea4a5937 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/Downsample.java @@ -35,7 +35,6 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xpack.core.downsample.DownsampleIndexerAction; import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; @@ -66,7 +65,6 @@ public class Downsample extends Plugin implements ActionPlugin, PersistentTaskPl @Override public List> getActions() { return List.of( - new ActionHandler<>(DownsampleIndexerAction.INSTANCE, TransportDownsampleIndexerAction.class), new ActionHandler<>(DownsampleAction.INSTANCE, TransportDownsampleAction.class), new ActionHandler<>( DownsampleShardPersistentTaskExecutor.DelegatingAction.INSTANCE, diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java index b5ac4b0ae37a..148a32851b0a 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleMetrics.java @@ -25,8 +25,6 @@ import java.util.Map; * - Add a constant for its name, following the naming conventions for metrics. * - Register it in method {@link #doStart}. * - Add a function for recording its value. - * - If needed, inject {@link DownsampleMetrics} to the action containing the logic - * that records the metric value. For reference, see {@link TransportDownsampleIndexerAction}. 
*/ public class DownsampleMetrics extends AbstractLifecycleComponent { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 2c08dcd9017f..4aaead13f753 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -103,7 +103,7 @@ import static org.elasticsearch.xpack.core.ilm.DownsampleAction.DOWNSAMPLED_INDE /** * The master downsample action that coordinates * - creating the downsample index - * - instantiating {@link DownsampleShardIndexer}s to index downsample documents + * - instantiating {@link org.elasticsearch.persistent.PersistentTasksExecutor} to start a persistent downsample task * - cleaning up state */ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAction { diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java deleted file mode 100644 index 8c396c493495..000000000000 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleIndexerAction.java +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.downsample; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.NoShardAvailableActionException; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.OriginSettingClient; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.cluster.block.ClusterBlockLevel; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.ShardIterator; -import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.injection.guice.Inject; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.downsample.DownsampleIndexerAction; -import org.elasticsearch.xpack.core.downsample.DownsampleShardIndexerStatus; -import org.elasticsearch.xpack.core.downsample.DownsampleShardPersistentTaskState; -import org.elasticsearch.xpack.core.downsample.DownsampleShardTask; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.concurrent.atomic.AtomicReferenceArray; - -/** - * A {@link TransportBroadcastAction} that downsamples all the shards of a source index into a new downsample index. - * - * TODO: Enforce that we don't retry on another replica if we throw an error after sending some buckets. 
- */ -public class TransportDownsampleIndexerAction extends TransportBroadcastAction< - DownsampleIndexerAction.Request, - DownsampleIndexerAction.Response, - DownsampleIndexerAction.ShardDownsampleRequest, - DownsampleIndexerAction.ShardDownsampleResponse> { - - private final Client client; - private final IndicesService indicesService; - - private final DownsampleMetrics downsampleMetrics; - - @Inject - public TransportDownsampleIndexerAction( - Client client, - ClusterService clusterService, - TransportService transportService, - IndicesService indicesService, - ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, - DownsampleMetrics downsampleMetrics - ) { - super( - DownsampleIndexerAction.NAME, - clusterService, - transportService, - actionFilters, - indexNameExpressionResolver, - DownsampleIndexerAction.Request::new, - DownsampleIndexerAction.ShardDownsampleRequest::new, - transportService.getThreadPool().executor(Downsample.DOWNSAMPLE_TASK_THREAD_POOL_NAME) - ); - this.client = new OriginSettingClient(client, ClientHelper.ROLLUP_ORIGIN); - this.indicesService = indicesService; - this.downsampleMetrics = downsampleMetrics; - } - - @Override - protected List shards(ClusterState clusterState, DownsampleIndexerAction.Request request, String[] concreteIndices) { - if (concreteIndices.length > 1) { - throw new IllegalArgumentException("multiple indices: " + Arrays.toString(concreteIndices)); - } - - final List groups = clusterService.operationRouting().searchShards(clusterState, concreteIndices, null, null); - for (ShardIterator group : groups) { - // fails fast if any non-active groups - if (group.size() == 0) { - throw new NoShardAvailableActionException(group.shardId()); - } - } - return groups; - } - - @Override - protected ClusterBlockException checkGlobalBlock(ClusterState state, DownsampleIndexerAction.Request request) { - return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); - } - - @Override - 
protected ClusterBlockException checkRequestBlock( - ClusterState state, - DownsampleIndexerAction.Request request, - String[] concreteIndices - ) { - return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE, concreteIndices); - } - - @Override - protected void doExecute( - Task task, - DownsampleIndexerAction.Request request, - ActionListener listener - ) { - new Async(task, request, listener).start(); - } - - @Override - protected DownsampleIndexerAction.ShardDownsampleRequest newShardRequest( - int numShards, - ShardRouting shard, - DownsampleIndexerAction.Request request - ) { - return new DownsampleIndexerAction.ShardDownsampleRequest(shard.shardId(), request); - } - - @Override - protected DownsampleIndexerAction.ShardDownsampleResponse shardOperation( - DownsampleIndexerAction.ShardDownsampleRequest request, - Task task - ) throws IOException { - IndexService indexService = indicesService.indexService(request.shardId().getIndex()); - DownsampleShardIndexer indexer = new DownsampleShardIndexer( - (DownsampleShardTask) task, - client, - indexService, - downsampleMetrics, - request.shardId(), - request.getDownsampleIndex(), - request.getRollupConfig(), - request.getMetricFields(), - request.getLabelFields(), - request.getDimensionFields(), - new DownsampleShardPersistentTaskState(DownsampleShardIndexerStatus.INITIALIZED, null) - ); - return indexer.execute(); - } - - @Override - protected DownsampleIndexerAction.ShardDownsampleResponse readShardResponse(StreamInput in) throws IOException { - return new DownsampleIndexerAction.ShardDownsampleResponse(in); - } - - @Override - protected DownsampleIndexerAction.Response newResponse( - DownsampleIndexerAction.Request request, - AtomicReferenceArray shardsResponses, - ClusterState clusterState - ) { - long numIndexed = 0; - int successfulShards = 0; - for (int i = 0; i < shardsResponses.length(); i++) { - Object shardResponse = shardsResponses.get(i); - if (shardResponse == null) { - throw new 
ElasticsearchException("missing shard"); - } else if (shardResponse instanceof DownsampleIndexerAction.ShardDownsampleResponse r) { - successfulShards++; - numIndexed += r.getNumIndexed(); - } else if (shardResponse instanceof Exception e) { - throw new ElasticsearchException(e); - } else { - assert false : "unknown response [" + shardResponse + "]"; - throw new IllegalStateException("unknown response [" + shardResponse + "]"); - } - } - return new DownsampleIndexerAction.Response(true, shardsResponses.length(), successfulShards, 0, numIndexed); - } - - private class Async extends AsyncBroadcastAction { - private final DownsampleIndexerAction.Request request; - private final ActionListener listener; - - protected Async(Task task, DownsampleIndexerAction.Request request, ActionListener listener) { - super(task, request, listener); - this.request = request; - this.listener = listener; - } - - @Override - protected void finishHim() { - try { - DownsampleIndexerAction.Response resp = newResponse(request, shardsResponses, clusterService.state()); - listener.onResponse(resp); - } catch (Exception e) { - listener.onFailure(e); - } - } - } -} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 6ea522a4276a..4c9a5bee5577 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -556,7 +556,6 @@ public class Constants { "indices:admin/xpack/ccr/put_follow", "indices:admin/xpack/ccr/unfollow", "indices:admin/xpack/downsample", - "indices:admin/xpack/downsample_indexer", "indices:data/read/downsample_delegate", 
"indices:data/read/async_search/delete", "indices:data/read/async_search/get", From 87c58ff93f809227888af4ec912935e1515bed31 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 18 Feb 2025 13:32:05 +0100 Subject: [PATCH 06/29] [Entitlements] Add missing entitlements for trust store (#122797) Add missing entitlements for trust store if running in fips mode. Fixes #122546, fixes #122569, fixes #122568, fixes #122680, fixes #122566 --- .../EntitlementInitialization.java | 144 ++++++++++-------- muted-tests.yml | 10 -- .../plugin-metadata/entitlement-policy.yaml | 3 + 3 files changed, 82 insertions(+), 75 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 93b417e732a6..aee731cdd762 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -53,6 +53,7 @@ import java.nio.file.attribute.FileAttribute; import java.nio.file.spi.FileSystemProvider; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -137,76 +138,84 @@ public class EntitlementInitialization { var pathLookup = new PathLookup(getUserHome(), bootstrapArgs.configDir(), bootstrapArgs.dataDirs(), bootstrapArgs.tempDir()); Path logsDir = EntitlementBootstrap.bootstrapArgs().logsDir(); - // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it - var serverPolicy = new Policy( - "server", - List.of( - new Scope("org.elasticsearch.base", List.of(new CreateClassLoaderEntitlement())), - new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())), - new Scope( - "org.elasticsearch.server", - 
List.of( - new ExitVMEntitlement(), - new ReadStoreAttributesEntitlement(), - new CreateClassLoaderEntitlement(), - new InboundNetworkEntitlement(), - new OutboundNetworkEntitlement(), - new LoadNativeLibrariesEntitlement(), - new ManageThreadsEntitlement(), - new FilesEntitlement( - Stream.concat( - Stream.of( - FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), - FileData.ofPath(bootstrapArgs.configDir(), READ), - FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), - // OS release on Linux - FileData.ofPath(Path.of("/etc/os-release"), READ), - FileData.ofPath(Path.of("/etc/system-release"), READ), - FileData.ofPath(Path.of("/usr/lib/os-release"), READ), - // read max virtual memory areas - FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ), - FileData.ofPath(Path.of("/proc/meminfo"), READ), - // load averages on Linux - FileData.ofPath(Path.of("/proc/loadavg"), READ), - // control group stats on Linux. cgroup v2 stats are in an unpredicable - // location under `/sys/fs/cgroup`, so unfortunately we have to allow - // read access to the entire directory hierarchy. 
- FileData.ofPath(Path.of("/proc/self/cgroup"), READ), - FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ), - // // io stats on Linux - FileData.ofPath(Path.of("/proc/self/mountinfo"), READ), - FileData.ofPath(Path.of("/proc/diskstats"), READ) - ), - Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ)) - ).toList() - ) + List serverScopes = new ArrayList<>(); + Collections.addAll( + serverScopes, + new Scope("org.elasticsearch.base", List.of(new CreateClassLoaderEntitlement())), + new Scope("org.elasticsearch.xcontent", List.of(new CreateClassLoaderEntitlement())), + new Scope( + "org.elasticsearch.server", + List.of( + new ExitVMEntitlement(), + new ReadStoreAttributesEntitlement(), + new CreateClassLoaderEntitlement(), + new InboundNetworkEntitlement(), + new OutboundNetworkEntitlement(), + new LoadNativeLibrariesEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + Stream.concat( + Stream.of( + FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), + FileData.ofPath(bootstrapArgs.configDir(), READ), + FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), + // OS release on Linux + FileData.ofPath(Path.of("/etc/os-release"), READ), + FileData.ofPath(Path.of("/etc/system-release"), READ), + FileData.ofPath(Path.of("/usr/lib/os-release"), READ), + // read max virtual memory areas + FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ), + FileData.ofPath(Path.of("/proc/meminfo"), READ), + // load averages on Linux + FileData.ofPath(Path.of("/proc/loadavg"), READ), + // control group stats on Linux. cgroup v2 stats are in an unpredicable + // location under `/sys/fs/cgroup`, so unfortunately we have to allow + // read access to the entire directory hierarchy. 
+ FileData.ofPath(Path.of("/proc/self/cgroup"), READ), + FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ), + // // io stats on Linux + FileData.ofPath(Path.of("/proc/self/mountinfo"), READ), + FileData.ofPath(Path.of("/proc/diskstats"), READ) + ), + Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ)) + ).toList() ) - ), - new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), - new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), - new Scope( - "org.apache.lucene.core", - List.of( - new LoadNativeLibrariesEntitlement(), - new ManageThreadsEntitlement(), - new FilesEntitlement( - Stream.concat( - Stream.of(FileData.ofPath(bootstrapArgs.configDir(), READ)), - Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ_WRITE)) - ).toList() - ) - ) - ), - new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), - new Scope( - "org.elasticsearch.nativeaccess", - List.of( - new LoadNativeLibrariesEntitlement(), - new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE))) + ) + ), + new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), + new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())), + new Scope( + "org.apache.lucene.core", + List.of( + new LoadNativeLibrariesEntitlement(), + new ManageThreadsEntitlement(), + new FilesEntitlement( + Stream.concat( + Stream.of(FileData.ofPath(bootstrapArgs.configDir(), READ)), + Arrays.stream(bootstrapArgs.dataDirs()).map(d -> FileData.ofPath(d, READ_WRITE)) + ).toList() ) ) + ), + new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), + new Scope( + "org.elasticsearch.nativeaccess", + List.of( + new LoadNativeLibrariesEntitlement(), + new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), 
FilesEntitlement.BaseDir.DATA, READ_WRITE))) + ) ) ); + + Path trustStorePath = trustStorePath(); + if (trustStorePath != null) { + serverScopes.add( + new Scope("org.bouncycastle.fips.tls", List.of(new FilesEntitlement(List.of(FileData.ofPath(trustStorePath, READ))))) + ); + } + + // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it + var serverPolicy = new Policy("server", serverScopes); // agents run without a module, so this is a special hack for the apm agent // this should be removed once https://github.com/elastic/elasticsearch/issues/109335 is completed List agentEntitlements = List.of(new CreateClassLoaderEntitlement(), new ManageThreadsEntitlement()); @@ -230,6 +239,11 @@ public class EntitlementInitialization { return PathUtils.get(userHome); } + private static Path trustStorePath() { + String trustStore = System.getProperty("javax.net.ssl.trustStore"); + return trustStore != null ? Path.of(trustStore) : null; + } + private static Stream fileSystemProviderChecks() throws ClassNotFoundException, NoSuchMethodException { var fileSystemProviderClass = FileSystems.getDefault().provider().getClass(); diff --git a/muted-tests.yml b/muted-tests.yml index 72b01ef7345d..df74656dada0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -312,8 +312,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/122377 - class: org.elasticsearch.repositories.blobstore.testkit.analyze.HdfsRepositoryAnalysisRestIT issue: https://github.com/elastic/elasticsearch/issues/122378 -- class: org.elasticsearch.telemetry.apm.ApmAgentSettingsIT - issue: https://github.com/elastic/elasticsearch/issues/122546 - class: org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsRecoveryTests method: testSnapshotRecovery {p0=false p1=false} issue: https://github.com/elastic/elasticsearch/issues/122549 @@ -386,14 +384,6 @@ tests: - class: org.elasticsearch.indices.recovery.IndexRecoveryIT method: testSourceThrottling issue: 
https://github.com/elastic/elasticsearch/issues/122712 -- class: org.elasticsearch.entitlement.qa.EntitlementsDeniedNonModularIT - issue: https://github.com/elastic/elasticsearch/issues/122569 -- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedNonModularIT - issue: https://github.com/elastic/elasticsearch/issues/122568 -- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedIT - issue: https://github.com/elastic/elasticsearch/issues/122680 -- class: org.elasticsearch.entitlement.qa.EntitlementsDeniedIT - issue: https://github.com/elastic/elasticsearch/issues/122566 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/122810 diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml index 90367da4cbce..1897e826313a 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml @@ -28,3 +28,6 @@ org.opensaml.saml.impl: - relative_path: saml-metadata.xml relative_to: config mode: read + - relative_path: metadata.xml + relative_to: config + mode: read From e16664573e752749928e682de26a7518679ae0bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Tue, 18 Feb 2025 13:55:17 +0100 Subject: [PATCH 07/29] Bump json-smart and oauth2-oidc-sdk (#122737) * Bump json-smart and oauth2-oidc-sdk --------- Co-authored-by: elasticsearchmachine --- docs/changelog/122737.yaml | 5 ++ gradle/verification-metadata.xml | 80 ++++--------------- modules/repository-azure/build.gradle | 15 ++-- x-pack/plugin/security/build.gradle | 12 +-- .../build.gradle | 2 +- .../lib/nimbus-jose-jwt-modified/build.gradle | 2 +- .../nimbusds/jose/util/JSONObjectUtils.java | 11 +++ 7 files changed, 43 insertions(+), 84 deletions(-) create mode 100644 
docs/changelog/122737.yaml diff --git a/docs/changelog/122737.yaml b/docs/changelog/122737.yaml new file mode 100644 index 000000000000..97128009e5fe --- /dev/null +++ b/docs/changelog/122737.yaml @@ -0,0 +1,5 @@ +pr: 122737 +summary: Bump json-smart and oauth2-oidc-sdk +area: Authentication +type: upgrade +issues: [] diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index eb5776d0e96e..3e8810cb5ef0 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -984,36 +984,19 @@ + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - + + + @@ -1779,9 +1762,9 @@ - - - + + + @@ -1789,24 +1772,14 @@ - - - - - - - - - - - - - + + + @@ -4408,31 +4381,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index ded138efb4af..025c494b0c76 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -63,20 +63,20 @@ dependencies { api "com.github.stephenc.jcip:jcip-annotations:1.0-1" api "com.nimbusds:content-type:2.3" api "com.nimbusds:lang-tag:1.7" - api("com.nimbusds:nimbus-jose-jwt:9.37.3"){ + api("com.nimbusds:nimbus-jose-jwt:10.0.1"){ exclude group: 'com.google.crypto.tink', module: 'tink' // it's an optional dependency on which we don't rely } - api("com.nimbusds:oauth2-oidc-sdk:11.9.1"){ + api("com.nimbusds:oauth2-oidc-sdk:11.22.2"){ exclude group: 'com.google.crypto.tink', module: 'tink' // it's an optional dependency on which we don't rely } api "jakarta.activation:jakarta.activation-api:1.2.1" api "jakarta.xml.bind:jakarta.xml.bind-api:2.3.3" api "net.java.dev.jna:jna-platform:${versions.jna}" // Maven says 5.14.0 but this aligns with the Elasticsearch-wide version api "net.java.dev.jna:jna:${versions.jna}" // Maven says 5.14.0 but this aligns with the Elasticsearch-wide version - api "net.minidev:accessors-smart:2.5.0" - api "net.minidev:json-smart:2.5.0" + api 
"net.minidev:accessors-smart:2.5.2" + api "net.minidev:json-smart:2.5.2" api "org.codehaus.woodstox:stax2-api:4.2.2" - api "org.ow2.asm:asm:9.3" + api "org.ow2.asm:asm:9.7.1" runtimeOnly "com.google.code.gson:gson:2.11.0" runtimeOnly "org.cryptomator:siv-mode:1.5.2" @@ -190,11 +190,6 @@ tasks.named("thirdPartyAudit").configure { 'org.bouncycastle.cert.X509CertificateHolder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateHolder', 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', - 'org.bouncycastle.crypto.InvalidCipherTextException', - 'org.bouncycastle.crypto.engines.AESEngine', - 'org.bouncycastle.crypto.modes.GCMBlockCipher', - 'org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider', - 'org.bouncycastle.jce.provider.BouncyCastleProvider', 'org.bouncycastle.openssl.PEMKeyPair', 'org.bouncycastle.openssl.PEMParser', 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter', diff --git a/x-pack/plugin/security/build.gradle b/x-pack/plugin/security/build.gradle index d3425c1e42e5..eef4ca6d523d 100644 --- a/x-pack/plugin/security/build.gradle +++ b/x-pack/plugin/security/build.gradle @@ -79,21 +79,21 @@ dependencies { runtimeOnly "joda-time:joda-time:2.10.10" // Dependencies for oidc - api "com.nimbusds:oauth2-oidc-sdk:11.10.1" + api "com.nimbusds:oauth2-oidc-sdk:11.22.2" api project(path: xpackModule('security:lib:nimbus-jose-jwt-modified'), configuration: 'shadow') if (isEclipse) { /* * Eclipse can't pick up the shadow dependency so we point it at the unmodified version of the library * so it can compile things. 
*/ - api "com.nimbusds:nimbus-jose-jwt:9.37.3" + api "com.nimbusds:nimbus-jose-jwt:10.0.1" } - api "com.nimbusds:lang-tag:1.4.4" + api "com.nimbusds:lang-tag:1.7" api "com.sun.mail:jakarta.mail:1.6.3" api "net.jcip:jcip-annotations:1.0" - api "net.minidev:json-smart:2.5.1" - api "net.minidev:accessors-smart:2.4.2" - api "org.ow2.asm:asm:8.0.1" + api "net.minidev:json-smart:2.5.2" + api "net.minidev:accessors-smart:2.5.2" + api "org.ow2.asm:asm:9.7.1" testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}" diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle index f53ff7027f12..d7a32bca6e52 100644 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle +++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified-part1/build.gradle @@ -11,7 +11,7 @@ apply plugin: 'com.gradleup.shadow' // See the build.gradle file in the parent directory for an explanation of this unusual build dependencies { - implementation "com.nimbusds:nimbus-jose-jwt:9.37.3" + implementation "com.nimbusds:nimbus-jose-jwt:10.0.1" } tasks.named('shadowJar').configure { diff --git a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle index 4418bd32e64c..f618f96706d6 100644 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle +++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/build.gradle @@ -11,7 +11,7 @@ apply plugin: 'com.gradleup.shadow' // See the build.gradle file in the parent directory for an explanation of this unusual build dependencies { - implementation "com.nimbusds:nimbus-jose-jwt:9.37.3" + implementation "com.nimbusds:nimbus-jose-jwt:10.0.1" implementation project(path: xpackModule('security:lib:nimbus-jose-jwt-modified-part2'), configuration: 'shadow') } diff --git 
a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java index 1ea11f5c280e..34b61e612c74 100644 --- a/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java +++ b/x-pack/plugin/security/lib/nimbus-jose-jwt-modified/src/main/java/com/nimbusds/jose/util/JSONObjectUtils.java @@ -13,6 +13,7 @@ import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.text.ParseException; +import java.util.Date; import java.util.List; import java.util.Map; @@ -192,6 +193,16 @@ public class JSONObjectUtils { } } + public static Date getEpochSecondAsDate(final Map o, final String key) throws ParseException { + try { + return AccessController.doPrivileged( + (PrivilegedExceptionAction) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.getEpochSecondAsDate(o, key) + ); + } catch (PrivilegedActionException e) { + throw (ParseException) e.getException(); + } + } + public static String toJSONString(final Map o) { return AccessController.doPrivileged( (PrivilegedAction) () -> org.elasticsearch.nimbus.jose.util.JSONObjectUtils.toJSONString(o) From d78bfe8f6e55be641d3a40919829a74ab947800d Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 18 Feb 2025 14:09:59 +0100 Subject: [PATCH 08/29] Skip SM policy tests for JDK 24 when Security Manager is not available anymore (#122828) --- .../org/elasticsearch/bootstrap/ESPolicyUnitTests.java | 2 ++ .../java/org/elasticsearch/bootstrap/PolicyUtilTests.java | 2 ++ .../org/elasticsearch/plugins/cli/PluginSecurityTests.java | 7 +++++++ 3 files changed, 11 insertions(+) diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/ESPolicyUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/ESPolicyUnitTests.java 
index 2510208a496a..34d15fa4aebb 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/ESPolicyUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/ESPolicyUnitTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -42,6 +43,7 @@ public class ESPolicyUnitTests extends ESTestCase { @BeforeClass public static void setupPolicy() { + assumeTrue("test requires security manager to be supported", RuntimeVersionFeature.isSecurityManagerAvailable()); assumeTrue("test cannot run with security manager", System.getSecurityManager() == null); DEFAULT_POLICY = PolicyUtil.readPolicy(ESPolicy.class.getResource(POLICY_RESOURCE), TEST_CODEBASES); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/PolicyUtilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/PolicyUtilTests.java index d8352669083d..5a21cda02e7f 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/PolicyUtilTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/PolicyUtilTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.plugins.PluginDescriptor; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -40,6 +41,7 @@ public class PolicyUtilTests extends ESTestCase { @Before public void assumeSecurityManagerDisabled() { + assumeTrue("test requires security manager to be supported", RuntimeVersionFeature.isSecurityManagerAvailable()); assumeTrue("test cannot run with security manager enabled", System.getSecurityManager() == null); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/cli/PluginSecurityTests.java 
b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/cli/PluginSecurityTests.java index f40c0707db7f..52a712518357 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/cli/PluginSecurityTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/cli/PluginSecurityTests.java @@ -11,8 +11,10 @@ package org.elasticsearch.plugins.cli; import org.elasticsearch.bootstrap.PluginPolicyInfo; import org.elasticsearch.bootstrap.PolicyUtil; +import org.elasticsearch.jdk.RuntimeVersionFeature; import org.elasticsearch.plugins.PluginDescriptor; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.io.IOException; import java.nio.file.Files; @@ -26,6 +28,11 @@ import static org.hamcrest.Matchers.containsInAnyOrder; /** Tests plugin manager security check */ public class PluginSecurityTests extends ESTestCase { + @Before + public void assumeSecurityManagerSupported() { + assumeTrue("test requires security manager to be supported", RuntimeVersionFeature.isSecurityManagerAvailable()); + } + PluginPolicyInfo makeDummyPlugin(String policy, String... 
files) throws IOException { Path plugin = createTempDir(); Files.copy(this.getDataPath(policy), plugin.resolve(PluginDescriptor.ES_PLUGIN_POLICY)); From 8eb89cf7c1ebb178d90d916578142b1c5cf4cf4f Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 18 Feb 2025 14:12:01 +0100 Subject: [PATCH 09/29] Add entitlement checks for java.io stream classes (#122406) --- .../bridge/EntitlementChecker.java | 41 +++++++ .../entitlement/qa/test/FileCheckActions.java | 111 ++++++++++++++++++ .../api/ElasticsearchEntitlementChecker.java | 109 +++++++++++++++++ .../runtime/policy/PolicyManager.java | 8 ++ .../plugin-metadata/entitlement-policy.yaml | 5 + .../plugin-metadata/entitlement-policy.yaml | 4 + .../plugin-metadata/entitlement-policy.yaml | 5 + .../plugin-metadata/entitlement-policy.yaml | 8 ++ 8 files changed, 291 insertions(+) create mode 100644 modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml create mode 100644 x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 640e0d06cc4b..f113960be927 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,6 +10,7 @@ package org.elasticsearch.entitlement.bridge; import java.io.File; +import java.io.FileDescriptor; import java.io.FileFilter; import java.io.FilenameFilter; import java.io.InputStream; @@ -572,14 +573,54 @@ public interface EntitlementChecker { void check$java_io_File$setWritable(Class callerClass, File file, boolean writable, boolean ownerOnly); + void check$java_io_FileInputStream$(Class callerClass, File file); + + void check$java_io_FileInputStream$(Class callerClass, FileDescriptor fd); + + void 
check$java_io_FileInputStream$(Class callerClass, String name); + void check$java_io_FileOutputStream$(Class callerClass, File file); void check$java_io_FileOutputStream$(Class callerClass, File file, boolean append); + void check$java_io_FileOutputStream$(Class callerClass, FileDescriptor fd); + void check$java_io_FileOutputStream$(Class callerClass, String name); void check$java_io_FileOutputStream$(Class callerClass, String name, boolean append); + void check$java_io_FileReader$(Class callerClass, File file); + + void check$java_io_FileReader$(Class callerClass, File file, Charset charset); + + void check$java_io_FileReader$(Class callerClass, FileDescriptor fd); + + void check$java_io_FileReader$(Class callerClass, String name); + + void check$java_io_FileReader$(Class callerClass, String name, Charset charset); + + void check$java_io_FileWriter$(Class callerClass, File file); + + void check$java_io_FileWriter$(Class callerClass, File file, boolean append); + + void check$java_io_FileWriter$(Class callerClass, File file, Charset charset); + + void check$java_io_FileWriter$(Class callerClass, File file, Charset charset, boolean append); + + void check$java_io_FileWriter$(Class callerClass, FileDescriptor fd); + + void check$java_io_FileWriter$(Class callerClass, String name); + + void check$java_io_FileWriter$(Class callerClass, String name, boolean append); + + void check$java_io_FileWriter$(Class callerClass, String name, Charset charset); + + void check$java_io_FileWriter$(Class callerClass, String name, Charset charset, boolean append); + + void check$java_io_RandomAccessFile$(Class callerClass, String name, String mode); + + void check$java_io_RandomAccessFile$(Class callerClass, File file, String mode); + void check$java_util_Scanner$(Class callerClass, File source); void check$java_util_Scanner$(Class callerClass, File source, String charsetName); diff --git 
a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java index 9a6c59e20472..bedb8790c1ad 100644 --- a/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java +++ b/libs/entitlement/qa/entitlement-test-plugin/src/main/java/org/elasticsearch/entitlement/qa/test/FileCheckActions.java @@ -13,9 +13,14 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.qa.entitled.EntitledActions; import java.io.File; +import java.io.FileDescriptor; +import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.FileWriter; import java.io.IOException; +import java.io.RandomAccessFile; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -23,6 +28,7 @@ import java.nio.file.Paths; import java.nio.file.attribute.UserPrincipal; import java.util.Scanner; +import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.ALWAYS_DENIED; import static org.elasticsearch.entitlement.qa.test.EntitlementTest.ExpectedAccess.PLUGINS; @SuppressForbidden(reason = "Explicitly checking APIs that are forbidden") @@ -216,6 +222,21 @@ class FileCheckActions { new Scanner(readFile().toFile(), "UTF-8"); } + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileInputStreamFile() throws IOException { + new FileInputStream(readFile().toFile()).close(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createFileInputStreamFileDescriptor() throws IOException { + new FileInputStream(FileDescriptor.in).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileInputStreamString() throws IOException { + new 
FileInputStream(readFile().toString()).close(); + } + @EntitlementTest(expectedAccess = PLUGINS) static void createFileOutputStreamString() throws IOException { new FileOutputStream(readWriteFile().toString()).close(); @@ -236,6 +257,96 @@ class FileCheckActions { new FileOutputStream(readWriteFile().toFile(), false).close(); } + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createFileOutputStreamFileDescriptor() throws IOException { + new FileOutputStream(FileDescriptor.out).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileReaderFile() throws IOException { + new FileReader(readFile().toFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileReaderFileCharset() throws IOException { + new FileReader(readFile().toFile(), StandardCharsets.UTF_8).close(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void createFileReaderFileDescriptor() throws IOException { + new FileReader(FileDescriptor.in).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileReaderString() throws IOException { + new FileReader(readFile().toString()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileReaderStringCharset() throws IOException { + new FileReader(readFile().toString(), StandardCharsets.UTF_8).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterFile() throws IOException { + new FileWriter(readWriteFile().toFile()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterFileWithAppend() throws IOException { + new FileWriter(readWriteFile().toFile(), false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterFileCharsetWithAppend() throws IOException { + new FileWriter(readWriteFile().toFile(), StandardCharsets.UTF_8, false).close(); + } + + @EntitlementTest(expectedAccess = ALWAYS_DENIED) + static void 
createFileWriterFileDescriptor() throws IOException { + new FileWriter(FileDescriptor.out).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterString() throws IOException { + new FileWriter(readWriteFile().toString()).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterStringWithAppend() throws IOException { + new FileWriter(readWriteFile().toString(), false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterStringCharset() throws IOException { + new FileWriter(readWriteFile().toString(), StandardCharsets.UTF_8).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createFileWriterStringCharsetWithAppend() throws IOException { + new FileWriter(readWriteFile().toString(), StandardCharsets.UTF_8, false).close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createRandomAccessFileStringRead() throws IOException { + new RandomAccessFile(readFile().toString(), "r").close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createRandomAccessFileStringReadWrite() throws IOException { + new RandomAccessFile(readWriteFile().toString(), "rw").close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createRandomAccessFileRead() throws IOException { + new RandomAccessFile(readFile().toFile(), "r").close(); + } + + @EntitlementTest(expectedAccess = PLUGINS) + static void createRandomAccessFileReadWrite() throws IOException { + new RandomAccessFile(readWriteFile().toFile(), "rw").close(); + } + @EntitlementTest(expectedAccess = PLUGINS) static void filesGetOwner() throws IOException { Files.getOwner(readFile()); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index aba0ab57feb2..1e2a88f82f7d 100644 --- 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -14,6 +14,7 @@ import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.io.File; +import java.io.FileDescriptor; import java.io.FileFilter; import java.io.FilenameFilter; import java.io.IOException; @@ -1103,6 +1104,21 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { policyManager.checkFileWrite(callerClass, file); } + @Override + public void check$java_io_FileInputStream$(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_FileInputStream$(Class callerClass, FileDescriptor fd) { + policyManager.checkFileDescriptorRead(callerClass); + } + + @Override + public void check$java_io_FileInputStream$(Class callerClass, String name) { + policyManager.checkFileRead(callerClass, new File(name)); + } + @Override public void check$java_io_FileOutputStream$(Class callerClass, String name) { policyManager.checkFileWrite(callerClass, new File(name)); @@ -1123,6 +1139,99 @@ public class ElasticsearchEntitlementChecker implements EntitlementChecker { policyManager.checkFileWrite(callerClass, file); } + @Override + public void check$java_io_FileOutputStream$(Class callerClass, FileDescriptor fd) { + policyManager.checkFileDescriptorWrite(callerClass); + } + + @Override + public void check$java_io_FileReader$(Class callerClass, File file) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_FileReader$(Class callerClass, File file, Charset charset) { + policyManager.checkFileRead(callerClass, file); + } + + @Override + public void check$java_io_FileReader$(Class callerClass, FileDescriptor fd) { + 
policyManager.checkFileDescriptorRead(callerClass); + } + + @Override + public void check$java_io_FileReader$(Class callerClass, String name) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileReader$(Class callerClass, String name, Charset charset) { + policyManager.checkFileRead(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, File file) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, File file, boolean append) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, File file, Charset charset) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, File file, Charset charset, boolean append) { + policyManager.checkFileWrite(callerClass, file); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, FileDescriptor fd) { + policyManager.checkFileDescriptorWrite(callerClass); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, String name) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, String name, boolean append) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, String name, Charset charset) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_FileWriter$(Class callerClass, String name, Charset charset, boolean append) { + policyManager.checkFileWrite(callerClass, new File(name)); + } + + @Override + public void check$java_io_RandomAccessFile$(Class callerClass, String name, String mode) { + if (mode.equals("r")) { + 
policyManager.checkFileRead(callerClass, new File(name)); + } else { + policyManager.checkFileWrite(callerClass, new File(name)); + } + } + + @Override + public void check$java_io_RandomAccessFile$(Class callerClass, File file, String mode) { + if (mode.equals("r")) { + policyManager.checkFileRead(callerClass, file); + } else { + policyManager.checkFileWrite(callerClass, file); + } + } + @Override public void check$java_util_Scanner$(Class callerClass, File source) { policyManager.checkFileRead(callerClass, source); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 33ccf6fb05c9..d607a5e25b41 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -304,6 +304,14 @@ public class PolicyManager { } } + public void checkFileDescriptorRead(Class callerClass) { + neverEntitled(callerClass, () -> "read file descriptor"); + } + + public void checkFileDescriptorWrite(Class callerClass) { + neverEntitled(callerClass, () -> "write file descriptor"); + } + /** * Invoked when we try to get an arbitrary {@code FileAttributeView} class. 
Such a class can modify attributes, like owner etc.; * we could think about introducing checks for each of the operations, but for now we over-approximate this and simply deny when it is diff --git a/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml b/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..cef2e0cd6aeb --- /dev/null +++ b/modules/ingest-geoip/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +com.maxmind.db: + - files: + - relative_path: "ingest-geoip/" + relative_to: "config" + mode: "read_write" diff --git a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml index 394e5e38d9f5..e88b31b86fca 100644 --- a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,3 +1,7 @@ ALL-UNNAMED: - manage_threads - outbound_network + - files: + - relative_path: "repository-s3/aws-web-identity-token-file" + relative_to: "config" + mode: "read" diff --git a/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 000000000000..34bac2441161 --- /dev/null +++ b/x-pack/plugin/blob-cache/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +org.elasticsearch.blobcache: + - files: + - relative_path: "shared_snapshot_cache" + relative_to: "data" + mode: "read_write" diff --git a/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml index 2eb0d0dbd988..4065b2545144 100644 --- a/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/watcher/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,10 @@ ALL-UNNAMED: - manage_threads + - 
files: + - relative_path: ".mime.types" + relative_to: "home" + mode: "read" + - relative_path: ".mailcap" + relative_to: "home" + mode: "read" + From 229d392e63e262ff8e68e0ee74f50b520e237b0c Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Tue, 18 Feb 2025 08:26:35 -0500 Subject: [PATCH 10/29] Fix ArrayIndexOutOfBoundsException in ShardBulkInferenceActionFilter (#122538) --- docs/changelog/122538.yaml | 5 +++ .../ShardBulkInferenceActionFilterIT.java | 45 +++++++++++++++++++ .../ShardBulkInferenceActionFilter.java | 6 +-- 3 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/122538.yaml diff --git a/docs/changelog/122538.yaml b/docs/changelog/122538.yaml new file mode 100644 index 000000000000..441f454923cb --- /dev/null +++ b/docs/changelog/122538.yaml @@ -0,0 +1,5 @@ +pr: 122538 +summary: Fix `ArrayIndexOutOfBoundsException` in `ShardBulkInferenceActionFilter` +area: Ingest +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 303f957c7ab2..b8e594e70abd 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -44,6 +44,7 @@ import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticTextInput; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { @@ -183,4 +184,48 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { 
searchResponse.decRef(); } } + + public void testItemFailures() { + prepareCreate(INDEX_NAME).setMapping( + String.format( + Locale.ROOT, + """ + { + "properties": { + "sparse_field": { + "type": "semantic_text", + "inference_id": "%s" + }, + "dense_field": { + "type": "semantic_text", + "inference_id": "%s" + } + } + } + """, + TestSparseInferenceServiceExtension.TestInferenceService.NAME, + TestDenseInferenceServiceExtension.TestInferenceService.NAME + ) + ).get(); + + BulkRequestBuilder bulkReqBuilder = client().prepareBulk(); + int totalBulkSize = randomIntBetween(100, 200); // Use a bulk request size large enough to require batching + for (int bulkSize = 0; bulkSize < totalBulkSize; bulkSize++) { + String id = Integer.toString(bulkSize); + + // Set field values that will cause errors when generating inference requests + Map source = new HashMap<>(); + source.put("sparse_field", List.of(Map.of("foo", "bar"), Map.of("baz", "bar"))); + source.put("dense_field", List.of(Map.of("foo", "bar"), Map.of("baz", "bar"))); + + bulkReqBuilder.add(new IndexRequestBuilder(client()).setIndex(INDEX_NAME).setId(id).setSource(source)); + } + + BulkResponse bulkResponse = bulkReqBuilder.get(); + assertThat(bulkResponse.hasFailures(), equalTo(true)); + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + assertThat(bulkItemResponse.isFailed(), equalTo(true)); + assertThat(bulkItemResponse.getFailureMessage(), containsString("expected [String|Number|Boolean]")); + } + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java index 3933260664b7..5fca096dae1e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -482,7 +482,7 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { isUpdateRequest = true; if (updateRequest.script() != null) { addInferenceResponseFailure( - item.id(), + itemIndex, new ElasticsearchStatusException( "Cannot apply update with a script on indices that contain [{}] field(s)", RestStatus.BAD_REQUEST, @@ -540,7 +540,7 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { if (valueObj == null || valueObj == EXPLICIT_NULL) { if (isUpdateRequest && useLegacyFormat) { addInferenceResponseFailure( - item.id(), + itemIndex, new ElasticsearchStatusException( "Field [{}] must be specified on an update request to calculate inference for field [{}]", RestStatus.BAD_REQUEST, @@ -557,7 +557,7 @@ public class ShardBulkInferenceActionFilter implements MappedActionFilter { try { values = SemanticTextUtils.nodeStringValues(field, valueObj); } catch (Exception exc) { - addInferenceResponseFailure(item.id(), exc); + addInferenceResponseFailure(itemIndex, exc); break; } From 17147a2978ef7845334a13d36ab6cb15113a4f8a Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 19 Feb 2025 01:21:01 +1100 Subject: [PATCH 11/29] Mute org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT testRestoreShrinkIndex #121717 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index df74656dada0..e89ad77db178 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -386,6 +386,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/122712 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/122810 +- class: org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT + method: testRestoreShrinkIndex + issue: 
https://github.com/elastic/elasticsearch/issues/121717 # Examples: # From 9145ab73cf710d712af6d7f9075198944f474493 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Wed, 19 Feb 2025 01:28:38 +1100 Subject: [PATCH 12/29] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to d74b1fd (#122839) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Update | Change | |---|---|---| | docker.elastic.co/wolfi/chainguard-base | digest | `ecd940b` -> `d74b1fd` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 1pm on tuesday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Renovate Bot](https://redirect.github.com/renovatebot/renovate). 
--- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index d461a1bfae61..b0129d26a818 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:ecd940be9f342ee6173397c48f3df5bb410e95000f8726fd01759b6c39b0beda", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:d74b1fda6b7fee2c90b410df258e005c049e0672fe16d79d00e58f14fb69f90b", "-wolfi", "apk" ), From 60176b804c4f5df190387289a81e8f653db46996 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 18 Feb 2025 16:16:29 +0100 Subject: [PATCH 13/29] [CI] Unmute RollingUpgrade*IndexCompatibilityTestCase (#122856) All those issues have been opened by test automation, before some missing transport versions was fixed (see #122580). 
Closes #122688 Closes #122689 Closes #122690 Closes #122691 Closes #122694 Closes #122695 Closes #122696 Closes #122697 Closes #122698 Closes #122700 Closes #122701 Closes #122702 Closes #122703 Closes #122704 Closes #122705 --- muted-tests.yml | 45 --------------------------------------------- 1 file changed, 45 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index e89ad77db178..0df627bfa8e6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -330,54 +330,9 @@ tests: - class: org.elasticsearch.xpack.autoscaling.storage.ReactiveStorageIT method: testScaleWhileShrinking issue: https://github.com/elastic/elasticsearch/issues/122119 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testIndexUpgrade {p0=[9.1.0, 8.19.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122688 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testRestoreIndex {p0=[9.1.0, 9.1.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122689 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testClosedIndexUpgrade {p0=[9.1.0, 8.19.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122690 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testRestoreIndex {p0=[9.1.0, 8.19.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122691 - class: org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests method: testCreateAndRestorePartialSearchableSnapshot issue: https://github.com/elastic/elasticsearch/issues/122693 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testClosedIndexUpgrade {p0=[9.1.0, 9.1.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122694 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testClosedIndexUpgrade 
{p0=[9.1.0, 9.1.0, 9.1.0]} - issue: https://github.com/elastic/elasticsearch/issues/122695 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testIndexUpgrade {p0=[9.1.0, 9.1.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122696 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testIndexUpgrade {p0=[9.1.0, 9.1.0, 9.1.0]} - issue: https://github.com/elastic/elasticsearch/issues/122697 -- class: org.elasticsearch.lucene.RollingUpgradeLuceneIndexCompatibilityTestCase - method: testRestoreIndex {p0=[9.1.0, 9.1.0, 9.1.0]} - issue: https://github.com/elastic/elasticsearch/issues/122698 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.1.0, 8.19.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122700 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.1.0, 9.1.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122701 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.1.0, 8.19.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122702 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.1.0, 9.1.0, 8.19.0]} - issue: https://github.com/elastic/elasticsearch/issues/122703 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testSearchableSnapshotUpgrade {p0=[9.1.0, 9.1.0, 9.1.0]} - issue: https://github.com/elastic/elasticsearch/issues/122704 -- class: org.elasticsearch.lucene.RollingUpgradeSearchableSnapshotIndexCompatibilityIT - method: testMountSearchableSnapshot {p0=[9.1.0, 9.1.0, 9.1.0]} - issue: 
https://github.com/elastic/elasticsearch/issues/122705 - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT method: testSearchWithRandomDisconnects issue: https://github.com/elastic/elasticsearch/issues/122707 From f5927055fe675d05a3bce1808db3609153680c7e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 18 Feb 2025 07:41:10 -0800 Subject: [PATCH 14/29] Use ES build instead of xpack inside xpack info action (#122859) Xpack used to be a plugin to Elasticsearch, and existed in a separate repository. So it had a separate build hash and date, which is exposed through the xpack info API. Since xpack is no longer a plugin, and part of the Elasticsearch repository, having a separate build info is no longer necessary. This commit keeps the xpack info content intact, but switches the implementation to look at the Elasticsearch build details, and removes the xpack build info class. --- .../elasticsearch/xpack/core/XPackBuild.java | 77 ------------------- .../core/action/TransportXPackInfoAction.java | 4 +- 2 files changed, 2 insertions(+), 79 deletions(-) delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackBuild.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackBuild.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackBuild.java deleted file mode 100644 index 1cea4c23bcca..000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackBuild.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ -package org.elasticsearch.xpack.core; - -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; - -import java.io.IOException; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.jar.JarInputStream; -import java.util.jar.Manifest; - -/** - * Information about the built version of x-pack that is running. - */ -public class XPackBuild { - - public static final XPackBuild CURRENT; - - static { - final String shortHash; - final String date; - - Path path = getElasticsearchCodebase(); - if (path.toString().endsWith(".jar")) { - try (JarInputStream jar = new JarInputStream(Files.newInputStream(path))) { - Manifest manifest = jar.getManifest(); - shortHash = manifest.getMainAttributes().getValue("Change"); - date = manifest.getMainAttributes().getValue("Build-Date"); - } catch (IOException e) { - throw new RuntimeException(e); - } - } else { - // not running from a jar (unit tests, IDE) - shortHash = "Unknown"; - date = "Unknown"; - } - - CURRENT = new XPackBuild(shortHash, date); - } - - /** - * Returns path to xpack codebase path - */ - @SuppressForbidden(reason = "looks up path of xpack.jar directly") - static Path getElasticsearchCodebase() { - URL url = XPackBuild.class.getProtectionDomain().getCodeSource().getLocation(); - try { - return PathUtils.get(url.toURI()); - } catch (URISyntaxException bogus) { - throw new RuntimeException(bogus); - } - } - - private String shortHash; - private String date; - - XPackBuild(String shortHash, String date) { - this.shortHash = shortHash; - this.date = date; - } - - public String shortHash() { - return shortHash; - } - - public String date() { - return date; - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 84e27b08c1d3..5a0c6a31d1fb 
100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.action; +import org.elasticsearch.Build; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; @@ -23,7 +24,6 @@ import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.Featur import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.XPackBuild; import java.util.HashSet; import java.util.List; @@ -58,7 +58,7 @@ public class TransportXPackInfoAction extends HandledTransportAction Date: Tue, 18 Feb 2025 17:06:27 +0100 Subject: [PATCH 15/29] [Test] Fix IndexAliasesTests#testRemoveIndex (#122864) Exclude `.security*` aliases when calling Get Aliases API with the admin superuser. 
Resolves https://github.com/elastic/elasticsearch/issues/122221 --- muted-tests.yml | 3 --- .../elasticsearch/xpack/security/authz/IndexAliasesTests.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0df627bfa8e6..3719c20194b9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -288,9 +288,6 @@ tests: - class: org.elasticsearch.xpack.esql.action.CrossClusterAsyncQueryStopIT method: testStopQueryLocal issue: https://github.com/elastic/elasticsearch/issues/121672 -- class: org.elasticsearch.xpack.security.authz.IndexAliasesTests - method: testRemoveIndex - issue: https://github.com/elastic/elasticsearch/issues/122221 - class: org.elasticsearch.blocks.SimpleBlocksIT method: testConcurrentAddBlock issue: https://github.com/elastic/elasticsearch/issues/122324 diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index c1dbb004196a..63a15699cf0c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -881,7 +881,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { GetAliasesResponse getAliasesResponse = client.admin().indices().prepareGetAliases(TEST_REQUEST_TIMEOUT).setAliases("*").get(); assertThat(getAliasesResponse.getAliases().size(), equalTo(0)); assertAliases( - indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT).setAliases("*"), + indicesAdmin().prepareGetAliases(TEST_REQUEST_TIMEOUT).setAliases("*", "-.security*"), "bogus_index_1", "bogus_alias_1", "bogus_alias_2" From 894db683576c480fbf4e76825cc8700103dc8536 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 18 Feb 2025 11:46:04 -0500 Subject: 
[PATCH 16/29] Potentially addressing bbq bwc failures and added logging (#122553) I have run this many times locally, and it never failed. Maybe there is something "magical" in CI. Added some additional info in the assertion logging. --- muted-tests.yml | 12 ------ .../upgrades/VectorSearchIT.java | 38 ++++++++++++++----- 2 files changed, 28 insertions(+), 22 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 3719c20194b9..9024bc57acb2 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -227,18 +227,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=ml/*} issue: https://github.com/elastic/elasticsearch/issues/120816 -- class: org.elasticsearch.upgrades.VectorSearchIT - method: testBBQVectorSearch {upgradedNodes=0} - issue: https://github.com/elastic/elasticsearch/issues/121253 -- class: org.elasticsearch.upgrades.VectorSearchIT - method: testBBQVectorSearch {upgradedNodes=1} - issue: https://github.com/elastic/elasticsearch/issues/121271 -- class: org.elasticsearch.upgrades.VectorSearchIT - method: testBBQVectorSearch {upgradedNodes=2} - issue: https://github.com/elastic/elasticsearch/issues/121272 -- class: org.elasticsearch.upgrades.VectorSearchIT - method: testBBQVectorSearch {upgradedNodes=3} - issue: https://github.com/elastic/elasticsearch/issues/121273 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectorySessionFactoryTests issue: https://github.com/elastic/elasticsearch/issues/121285 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index 5308fe85c1ca..07034618be4a 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -13,6 +13,7 @@ import 
com.carrotsearch.randomizedtesting.annotations.Name; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -456,7 +457,11 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { } """; // create index and index 10 random floating point vectors - createIndex(BBQ_INDEX_NAME, Settings.EMPTY, mapping); + createIndex( + BBQ_INDEX_NAME, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build(), + mapping + ); index64DimVectors(BBQ_INDEX_NAME); // force merge the index client().performRequest(new Request("POST", "/" + BBQ_INDEX_NAME + "/_forcemerge?max_num_segments=1")); @@ -485,8 +490,8 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { Map response = search(searchRequest); assertThat(extractValue(response, "hits.total.value"), equalTo(7)); List> hits = extractValue(response, "hits.hits"); - assertThat(hits.get(0).get("_id"), equalTo("0")); - assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + assertThat("hits: " + response, hits.get(0).get("_id"), equalTo("0")); + assertThat("hits: " + response, (double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); // search with knn searchRequest = new Request("POST", "/" + BBQ_INDEX_NAME + "/_search"); @@ -504,8 +509,12 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { response = search(searchRequest); assertThat(extractValue(response, "hits.total.value"), equalTo(2)); hits = extractValue(response, "hits.hits"); - assertThat(hits.get(0).get("_id"), equalTo("0")); - assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + assertThat("expected: 0 received" + hits.get(0).get("_id") + " hits: " + response, hits.get(0).get("_id"), equalTo("0")); + 
assertThat( + "expected_near: 0.99 received" + hits.get(0).get("_score") + "hits: " + response, + (double) hits.get(0).get("_score"), + closeTo(0.9934857, 0.005) + ); } public void testFlatBBQVectorSearch() throws Exception { @@ -530,7 +539,11 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { } """; // create index and index 10 random floating point vectors - createIndex(FLAT_BBQ_INDEX_NAME, Settings.EMPTY, mapping); + createIndex( + FLAT_BBQ_INDEX_NAME, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build(), + mapping + ); index64DimVectors(FLAT_BBQ_INDEX_NAME); // force merge the index client().performRequest(new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_forcemerge?max_num_segments=1")); @@ -559,8 +572,8 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { Map response = search(searchRequest); assertThat(extractValue(response, "hits.total.value"), equalTo(7)); List> hits = extractValue(response, "hits.hits"); - assertThat(hits.get(0).get("_id"), equalTo("0")); - assertThat((double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); + assertThat("hits: " + response, hits.get(0).get("_id"), equalTo("0")); + assertThat("hits: " + response, (double) hits.get(0).get("_score"), closeTo(1.9869276, 0.0001)); // search with knn searchRequest = new Request("POST", "/" + FLAT_BBQ_INDEX_NAME + "/_search"); @@ -578,8 +591,12 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { response = search(searchRequest); assertThat(extractValue(response, "hits.total.value"), equalTo(2)); hits = extractValue(response, "hits.hits"); - assertThat(hits.get(0).get("_id"), equalTo("0")); - assertThat((double) hits.get(0).get("_score"), closeTo(0.9934857, 0.005)); + assertThat("expected: 0 received" + hits.get(0).get("_id") + " hits: " + response, hits.get(0).get("_id"), equalTo("0")); + assertThat( + "expected_near: 0.99 received" + 
hits.get(0).get("_score") + "hits: " + response, + (double) hits.get(0).get("_score"), + closeTo(0.9934857, 0.005) + ); } private void index64DimVectors(String indexName) throws Exception { @@ -605,6 +622,7 @@ public class VectorSearchIT extends AbstractRollingUpgradeTestCase { assertOK(client().performRequest(indexRequest)); } // always refresh to ensure the data is visible + flush(indexName, true); refresh(indexName); } From ba8c5764f8173db7e1771c22752483cfc4254999 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 18 Feb 2025 09:28:26 -0800 Subject: [PATCH 17/29] Use FallbackSyntheticSourceBlockLoader for unsigned_long and scaled_float fields (#122637) --- docs/changelog/122637.yaml | 6 ++ .../mapper/extras/ScaledFloatFieldMapper.java | 83 ++++++++++++++++-- .../ScaledFloatFieldBlockLoaderTests.java | 48 +++++++++++ .../extras/ScaledFloatFieldTypeTests.java | 4 +- .../FallbackSyntheticSourceBlockLoader.java | 2 +- .../ByteFieldBlockLoaderTests.java | 5 +- .../DoubleFieldBlockLoaderTests.java | 5 +- .../FloatFieldBlockLoaderTests.java | 5 +- .../HalfFloatFieldBlockLoaderTests.java | 5 +- .../IntegerFieldBlockLoaderTests.java | 5 +- .../LongFieldBlockLoaderTests.java | 5 +- .../ShortFieldBlockLoaderTests.java | 5 +- .../NumberFieldBlockLoaderTestCase.java | 21 +++-- .../DefaultMappingParametersHandler.java | 2 +- .../unsignedlong/UnsignedLongFieldMapper.java | 84 ++++++++++++++++++- .../UnsignedLongFieldBlockLoaderTests.java | 37 ++++++++ .../UnsignedLongFieldTypeTests.java | 3 +- 17 files changed, 296 insertions(+), 29 deletions(-) create mode 100644 docs/changelog/122637.yaml create mode 100644 modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldBlockLoaderTests.java rename {server/src/test/java/org/elasticsearch/index/mapper/blockloader => test/framework/src/main/java/org/elasticsearch/index/mapper}/NumberFieldBlockLoaderTestCase.java (72%) create mode 100644 
x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldBlockLoaderTests.java diff --git a/docs/changelog/122637.yaml b/docs/changelog/122637.yaml new file mode 100644 index 000000000000..e9c108311d20 --- /dev/null +++ b/docs/changelog/122637.yaml @@ -0,0 +1,6 @@ +pr: 122637 +summary: Use `FallbackSyntheticSourceBlockLoader` for `unsigned_long` and `scaled_float` + fields +area: Mapping +type: enhancement +issues: [] diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index a91ca66faa40..8f269ad4066c 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.FallbackSyntheticSourceBlockLoader; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IgnoreMalformedStoredValues; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -195,7 +196,9 @@ public class ScaledFloatFieldMapper extends FieldMapper { scalingFactor.getValue(), nullValue.getValue(), metric.getValue(), - indexMode + indexMode, + coerce.getValue().value(), + context.isSourceSynthetic() ); return new ScaledFloatFieldMapper(leafName(), type, builderParams(this, context), context.isSourceSynthetic(), this); } @@ -209,6 +212,8 @@ public class ScaledFloatFieldMapper extends FieldMapper { private final Double nullValue; private final TimeSeriesParams.MetricType metricType; private final IndexMode indexMode; + 
private final boolean coerce; + private final boolean isSyntheticSource; public ScaledFloatFieldType( String name, @@ -219,13 +224,17 @@ public class ScaledFloatFieldMapper extends FieldMapper { double scalingFactor, Double nullValue, TimeSeriesParams.MetricType metricType, - IndexMode indexMode + IndexMode indexMode, + boolean coerce, + boolean isSyntheticSource ) { super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.scalingFactor = scalingFactor; this.nullValue = nullValue; this.metricType = metricType; this.indexMode = indexMode; + this.coerce = coerce; + this.isSyntheticSource = isSyntheticSource; } public ScaledFloatFieldType(String name, double scalingFactor) { @@ -233,7 +242,7 @@ public class ScaledFloatFieldMapper extends FieldMapper { } public ScaledFloatFieldType(String name, double scalingFactor, boolean indexed) { - this(name, indexed, false, true, Collections.emptyMap(), scalingFactor, null, null, null); + this(name, indexed, false, true, Collections.emptyMap(), scalingFactor, null, null, null, false, false); } public double getScalingFactor() { @@ -315,6 +324,15 @@ public class ScaledFloatFieldMapper extends FieldMapper { double scalingFactorInverse = 1d / scalingFactor; return new BlockDocValuesReader.DoublesBlockLoader(name(), l -> l * scalingFactorInverse); } + if (isSyntheticSource) { + return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) { + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.doubles(expectedCount); + } + }; + } + ValueFetcher valueFetcher = sourceValueFetcher(blContext.sourcePaths(name())); BlockSourceReader.LeafIteratorLookup lookup = isStored() || isIndexed() ? 
BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) @@ -322,6 +340,57 @@ public class ScaledFloatFieldMapper extends FieldMapper { return new BlockSourceReader.DoublesBlockLoader(valueFetcher, lookup); } + private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { + var nullValueAdjusted = nullValue != null ? adjustSourceValue(nullValue, scalingFactor) : null; + + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValue) { + @Override + public void convertValue(Object value, List accumulator) { + if (coerce && value.equals("")) { + if (nullValueAdjusted != null) { + accumulator.add(nullValueAdjusted); + } + } + + try { + // Convert to doc_values format + var converted = adjustSourceValue(NumberFieldMapper.NumberType.objectToDouble(value), scalingFactor); + accumulator.add(converted); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + protected void parseNonNullValue(XContentParser parser, List accumulator) throws IOException { + // Aligned with implementation of `parseCreateField(XContentParser)` + if (coerce && parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0) { + if (nullValueAdjusted != null) { + accumulator.add(nullValueAdjusted); + } + } + + try { + double value = parser.doubleValue(coerce); + // Convert to doc_values format + var converted = adjustSourceValue(value, scalingFactor); + accumulator.add(converted); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + public void writeToBlock(List values, BlockLoader.Builder blockBuilder) { + var longBuilder = (BlockLoader.DoubleBuilder) blockBuilder; + + for (var value : values) { + longBuilder.appendDouble(value); + } + } + }; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); @@ -386,12 +455,16 @@ public class 
ScaledFloatFieldMapper extends FieldMapper { doubleValue = NumberFieldMapper.NumberType.objectToDouble(value); } - double factor = getScalingFactor(); - return Math.round(doubleValue * factor) / factor; + return adjustSourceValue(doubleValue, getScalingFactor()); } }; } + // Adjusts precision of a double value so that it looks like it came from doc_values. + private static Double adjustSourceValue(double value, double scalingFactor) { + return Math.round(value * scalingFactor) / scalingFactor; + } + @Override public Object valueForDisplay(Object value) { if (value == null) { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldBlockLoaderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldBlockLoaderTests.java new file mode 100644 index 000000000000..5541b5677f00 --- /dev/null +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldBlockLoaderTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.mapper.extras; + +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; +import org.elasticsearch.logsdb.datageneration.FieldType; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.List; +import java.util.Map; + +public class ScaledFloatFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { + public ScaledFloatFieldBlockLoaderTests() { + super(FieldType.SCALED_FLOAT); + } + + @Override + protected Double convert(Number value, Map fieldMapping) { + var scalingFactor = ((Number) fieldMapping.get("scaling_factor")).doubleValue(); + + var docValues = (boolean) fieldMapping.getOrDefault("doc_values", false); + + // There is a slight inconsistency between values that are read from doc_values and from source. + // Due to how precision reduction is applied to source values so that they are consistent with doc_values. + // See #122547. + if (docValues) { + var reverseScalingFactor = 1d / scalingFactor; + return Math.round(value.doubleValue() * scalingFactor) * reverseScalingFactor; + } + + // Adjust values coming from source to the way they are stored in doc_values. + // See mapper implementation. 
+ return Math.round(value.doubleValue() * scalingFactor) / scalingFactor; + } + + @Override + protected Collection getPlugins() { + return List.of(new MapperExtrasPlugin()); + } +} diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java index 0c69ec625858..aaf1abc735c3 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldTypeTests.java @@ -95,7 +95,9 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { 0.1 + randomDouble() * 100, null, null, - null + null, + false, + false ); Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java index 64f4d1bec04c..68b2e31a4b01 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FallbackSyntheticSourceBlockLoader.java @@ -293,6 +293,6 @@ public abstract class FallbackSyntheticSourceBlockLoader implements BlockLoader parseNonNullValue(parser, accumulator); } - abstract void parseNonNullValue(XContentParser parser, List accumulator) throws IOException; + protected abstract void parseNonNullValue(XContentParser parser, List accumulator) throws IOException; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java index 28d7cbcfb42d..0f8283c4fd42 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ByteFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class ByteFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public ByteFieldBlockLoaderTests() { super(FieldType.BYTE); } @Override - protected Integer convert(Number value) { + protected Integer convert(Number value, Map fieldMapping) { // All values that fit into int are represented as ints return value.intValue(); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java index e0b62b21ad87..5a504487680c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/DoubleFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class DoubleFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public DoubleFieldBlockLoaderTests() { super(FieldType.DOUBLE); } @Override - protected Double convert(Number value) { + protected Double convert(Number value, Map fieldMapping) { return value.doubleValue(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java index 63439a97d7c9..9b82293d3391 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/FloatFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class FloatFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public FloatFieldBlockLoaderTests() { super(FieldType.FLOAT); } @Override - protected Double convert(Number value) { + protected Double convert(Number value, Map fieldMapping) { // All float values are represented as double return value.doubleValue(); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java index 1e8cedb734af..23e6c86747f1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/HalfFloatFieldBlockLoaderTests.java @@ -10,15 +10,18 @@ package org.elasticsearch.index.mapper.blockloader; import org.apache.lucene.sandbox.document.HalfFloatPoint; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class HalfFloatFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public HalfFloatFieldBlockLoaderTests() { super(FieldType.HALF_FLOAT); } @Override - protected Double convert(Number value) { + protected Double convert(Number value, Map fieldMapping) { // All float values are represented as double return (double) HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value.floatValue())); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java index 5d7b9d78442c..6a9c9ca4ba7d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/IntegerFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class IntegerFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public IntegerFieldBlockLoaderTests() { super(FieldType.INTEGER); } @Override - protected Integer convert(Number value) { + protected Integer convert(Number value, Map fieldMapping) { return value.intValue(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java index ff953294fb61..b6425029175e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/LongFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class LongFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public LongFieldBlockLoaderTests() { super(FieldType.LONG); } @Override - protected Long convert(Number value) { + protected Long convert(Number value, Map fieldMapping) { return value.longValue(); } } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java index a40bc1c404f4..1f3286c3398c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/blockloader/ShortFieldBlockLoaderTests.java @@ -9,15 +9,18 @@ package org.elasticsearch.index.mapper.blockloader; +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; +import java.util.Map; + public class ShortFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { public ShortFieldBlockLoaderTests() { super(FieldType.SHORT); } @Override - protected Integer convert(Number value) { + protected Integer convert(Number value, Map fieldMapping) { // All values that fit into int are represented as ints return value.intValue(); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java similarity index 72% rename from server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java rename to test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java index e523d011c3ab..e5f68362005d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/blockloader/NumberFieldBlockLoaderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/NumberFieldBlockLoaderTestCase.java @@ -7,9 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.index.mapper.blockloader; +package org.elasticsearch.index.mapper; -import org.elasticsearch.index.mapper.BlockLoaderTestCase; import org.elasticsearch.logsdb.datageneration.FieldType; import java.util.List; @@ -24,25 +23,29 @@ public abstract class NumberFieldBlockLoaderTestCase extends B @Override @SuppressWarnings("unchecked") protected Object expected(Map fieldMapping, Object value, boolean syntheticSource) { - var nullValue = fieldMapping.get("null_value") != null ? convert((Number) fieldMapping.get("null_value")) : null; + var nullValue = fieldMapping.get("null_value") != null ? convert((Number) fieldMapping.get("null_value"), fieldMapping) : null; if (value instanceof List == false) { - return convert(value, nullValue); + return convert(value, nullValue, fieldMapping); } if ((boolean) fieldMapping.getOrDefault("doc_values", false)) { // Sorted and no duplicates - var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).sorted().toList(); + var resultList = ((List) value).stream() + .map(v -> convert(v, nullValue, fieldMapping)) + .filter(Objects::nonNull) + .sorted() + .toList(); return maybeFoldList(resultList); } // parsing from source - var resultList = ((List) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).toList(); + var resultList = ((List) value).stream().map(v -> convert(v, nullValue, fieldMapping)).filter(Objects::nonNull).toList(); return maybeFoldList(resultList); } @SuppressWarnings("unchecked") - private T convert(Object value, T nullValue) { + private T convert(Object value, T nullValue, Map fieldMapping) { if (value == null) { return nullValue; } @@ -51,12 +54,12 @@ public abstract class NumberFieldBlockLoaderTestCase extends B return nullValue; } if (value instanceof Number n) { - return convert(n); + return convert(n, fieldMapping); } // Malformed values are excluded return null; } - protected abstract T convert(Number value); + protected 
abstract T convert(Number value, Map fieldMapping); } diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index bf99ab71d014..e7b9140b07da 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -102,7 +102,7 @@ public class DefaultMappingParametersHandler implements DataSourceHandler { injected.put("scaling_factor", ESTestCase.randomFrom(10, 1000, 100000, 100.5)); if (ESTestCase.randomDouble() <= 0.2) { - injected.put("null_value", ESTestCase.randomFloat()); + injected.put("null_value", ESTestCase.randomDouble()); } if (ESTestCase.randomBoolean()) { diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 38afd235d6ea..04ea6a73e228 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; +import org.elasticsearch.index.mapper.FallbackSyntheticSourceBlockLoader; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.IgnoreMalformedStoredValues; import org.elasticsearch.index.mapper.MappedFieldType; @@ -207,7 +208,8 @@ 
public class UnsignedLongFieldMapper extends FieldMapper { meta.getValue(), dimension.getValue(), metric.getValue(), - indexMode + indexMode, + context.isSourceSynthetic() ); return new UnsignedLongFieldMapper(leafName(), fieldType, builderParams(this, context), context.isSourceSynthetic(), this); } @@ -221,6 +223,7 @@ public class UnsignedLongFieldMapper extends FieldMapper { private final boolean isDimension; private final MetricType metricType; private final IndexMode indexMode; + private final boolean isSyntheticSource; public UnsignedLongFieldType( String name, @@ -231,17 +234,19 @@ public class UnsignedLongFieldMapper extends FieldMapper { Map meta, boolean isDimension, MetricType metricType, - IndexMode indexMode + IndexMode indexMode, + boolean isSyntheticSource ) { super(name, indexed, isStored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_WITHOUT_TERMS, meta); this.nullValueFormatted = nullValueFormatted; this.isDimension = isDimension; this.metricType = metricType; this.indexMode = indexMode; + this.isSyntheticSource = isSyntheticSource; } public UnsignedLongFieldType(String name) { - this(name, true, false, true, null, Collections.emptyMap(), false, null, null); + this(name, true, false, true, null, Collections.emptyMap(), false, null, null, false); } @Override @@ -327,11 +332,24 @@ public class UnsignedLongFieldMapper extends FieldMapper { if (hasDocValues()) { return new BlockDocValuesReader.LongsBlockLoader(name()); } + if (isSyntheticSource) { + return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) { + @Override + public Builder builder(BlockFactory factory, int expectedCount) { + return factory.longs(expectedCount); + } + }; + } + ValueFetcher valueFetcher = new SourceValueFetcher(blContext.sourcePaths(name()), nullValueFormatted) { @Override protected Object parseSourceValue(Object value) { if (value.equals("")) { - return nullValueFormatted; + // `nullValueFormatted` is an unsigned value formatted for 
human use + // but block loaders operate with encoded signed 64 bit values + // because this is the format doc_values use. + // So we need to perform this conversion. + return unsignedToSortableSignedLong(parseUnsignedLong(nullValueFormatted)); } return unsignedToSortableSignedLong(parseUnsignedLong(value)); } @@ -342,6 +360,64 @@ public class UnsignedLongFieldMapper extends FieldMapper { return new BlockSourceReader.LongsBlockLoader(valueFetcher, lookup); } + private FallbackSyntheticSourceBlockLoader.Reader fallbackSyntheticSourceBlockLoaderReader() { + var nullValueEncoded = nullValueFormatted != null + ? (Number) unsignedToSortableSignedLong(parseUnsignedLong(nullValueFormatted)) + : null; + return new FallbackSyntheticSourceBlockLoader.ReaderWithNullValueSupport<>(nullValueFormatted) { + @Override + public void convertValue(Object value, List accumulator) { + if (value.equals("")) { + if (nullValueEncoded != null) { + accumulator.add(nullValueEncoded); + } + } + + try { + // Convert to doc_values format + var converted = unsignedToSortableSignedLong(parseUnsignedLong(value)); + accumulator.add(converted); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + protected void parseNonNullValue(XContentParser parser, List accumulator) throws IOException { + // Aligned with implementation of `parseCreateField(XContentParser)` + if (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0) { + if (nullValueEncoded != null) { + accumulator.add(nullValueEncoded); + } + } + + try { + Long rawValue; + if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) { + rawValue = parseUnsignedLong(parser.numberValue()); + } else { + rawValue = parseUnsignedLong(parser.text()); + } + + // Convert to doc_values format + var converted = unsignedToSortableSignedLong(rawValue); + accumulator.add(converted); + } catch (Exception e) { + // Malformed value, skip it + } + } + + @Override + public void writeToBlock(List 
values, BlockLoader.Builder blockBuilder) { + var longBuilder = (BlockLoader.LongBuilder) blockBuilder; + + for (var value : values) { + longBuilder.appendLong(value.longValue()); + } + } + }; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldBlockLoaderTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldBlockLoaderTests.java new file mode 100644 index 000000000000..562b1da7f07e --- /dev/null +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldBlockLoaderTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.unsignedlong; + +import org.elasticsearch.index.mapper.NumberFieldBlockLoaderTestCase; +import org.elasticsearch.logsdb.datageneration.FieldType; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.List; +import java.util.Map; + +public class UnsignedLongFieldBlockLoaderTests extends NumberFieldBlockLoaderTestCase { + private static final long MASK_2_63 = 0x8000000000000000L; + + public UnsignedLongFieldBlockLoaderTests() { + super(FieldType.UNSIGNED_LONG); + } + + @Override + protected Long convert(Number value, Map fieldMapping) { + // Adjust values coming from source to the way they are stored in doc_values. + // See mapper implementation. 
+ var unsigned = value.longValue(); + return unsigned ^ MASK_2_63; + } + + @Override + protected Collection getPlugins() { + return List.of(new UnsignedLongMapperPlugin()); + } +} diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java index e5f85f8b87b1..72cd64516617 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldTypeTests.java @@ -63,7 +63,8 @@ public class UnsignedLongFieldTypeTests extends FieldTypeTestCase { Collections.emptyMap(), false, null, - null + null, + false ); assertEquals( From 5e8865deac028c3eccced0c2768a7207fcfbd0c6 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Tue, 18 Feb 2025 18:30:37 +0100 Subject: [PATCH 18/29] Add _metric_names_hash field to OTel metric mappings (#120952) If metrics that have the same timestamp and dimensions aren't grouped into the same document, ES will consider them to be a duplicate. The _metric_names_hash field will be set by the OTel ES exporter. As it's mapped as a time_series_dimensions, it creates a different _tsid for documents with different sets of metrics. The tradeoff is that if the composition of the metrics grouping changes over time, a different _tsid will be created. That has an impact on the rate aggregation for counters. 
--- docs/changelog/120952.yaml | 5 ++++ .../metrics-otel@mappings.yaml | 4 ++++ .../rest-api-spec/test/20_metrics_tests.yml | 23 +++++++++++++++++++ 3 files changed, 32 insertions(+) create mode 100644 docs/changelog/120952.yaml diff --git a/docs/changelog/120952.yaml b/docs/changelog/120952.yaml new file mode 100644 index 000000000000..42d375b97992 --- /dev/null +++ b/docs/changelog/120952.yaml @@ -0,0 +1,5 @@ +pr: 120952 +summary: Add `_metric_names_hash` field to OTel metric mappings +area: Data streams +type: bug +issues: [] diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml index 368d538c304e..cfc68889e8c9 100644 --- a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml +++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml @@ -14,6 +14,10 @@ template: type: passthrough dynamic: true priority: 10 + # workaround for https://github.com/elastic/elasticsearch/issues/99123 + _metric_names_hash: + type: keyword + time_series_dimension: true unit: type: keyword time_series_dimension: true diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml index 9d5970786953..87081f974440 100644 --- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml +++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_tests.yml @@ -302,3 +302,26 @@ Metrics with different scope names are not duplicates: metrics: foo.bar: 42 - is_false: errors + +--- +Metrics with different metric name hashes are not duplicates: + - do: + bulk: + index: metrics-generic.otel-default + refresh: true + body: + - create: {"dynamic_templates":{"metrics.foo":"counter_long"}} + - 
"@timestamp": 2024-07-18T14:00:00Z + attributes: + foo: bar + metrics: + foo: 42 + _metric_names_hash: a9f37ed7 + - create: {"dynamic_templates":{"metrics.bar":"counter_long"}} + - "@timestamp": 2024-07-18T14:00:00Z + attributes: + foo: bar + metrics: + bar: 42 + _metric_names_hash: 76b77d1a + - is_false: errors From 45f0471924fded6831c2cd72d18762c6d1c4004b Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 18 Feb 2025 17:36:25 +0000 Subject: [PATCH 19/29] Bump post-recovery merge minimum version (#121997) In #113462 we introduced a version bound on post-recovery merges to avoid a spike of merge activity after an upgrade. At the time there was no `IndexVersion` unique to the 9.x series, but we have such a version now, so this commit properly restricts post-recovery merges to only apply to indices created by versions that support them. Relates ES-9620 --- .../indices/recovery/IndexRecoveryIT.java | 11 +++++++++-- .../org/elasticsearch/indices/PostRecoveryMerger.java | 4 +++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index a8caca94289b..769ffd15d9ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -1967,7 +1967,14 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase { internalCluster().startMasterOnlyNode(); final var dataNode = internalCluster().startDataOnlyNode(); final var indexName = randomIdentifier(); - createIndex(indexName, indexSettings(1, 0).put(INDEX_MERGE_ENABLED, false).build()); + final var indexSettingsBuilder = indexSettings(1, 0).put(INDEX_MERGE_ENABLED, false); + if (randomBoolean()) { + indexSettingsBuilder.put( + IndexMetadata.SETTING_VERSION_CREATED, + 
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current()) + ); + } + createIndex(indexName, indexSettingsBuilder.build()); final var initialSegmentCount = 20; for (int i = 0; i < initialSegmentCount; i++) { @@ -2051,7 +2058,7 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase { IndexVersionUtils.randomVersionBetween( random(), IndexVersionUtils.getLowestWriteCompatibleVersion(), - IndexVersionUtils.getPreviousVersion(IndexVersions.MERGE_ON_RECOVERY_VERSION) + IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) ) ) .build() diff --git a/server/src/main/java/org/elasticsearch/indices/PostRecoveryMerger.java b/server/src/main/java/org/elasticsearch/indices/PostRecoveryMerger.java index 17382e7854e9..d0ee1545fe0c 100644 --- a/server/src/main/java/org/elasticsearch/indices/PostRecoveryMerger.java +++ b/server/src/main/java/org/elasticsearch/indices/PostRecoveryMerger.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -46,6 +47,7 @@ class PostRecoveryMerger { private static final boolean TRIGGER_MERGE_AFTER_RECOVERY; static { + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_INDEXING) // remove this escape hatch final var propertyValue = System.getProperty("es.trigger_merge_after_recovery"); if (propertyValue == null) { TRIGGER_MERGE_AFTER_RECOVERY = true; @@ -95,7 +97,7 @@ class PostRecoveryMerger { return recoveryListener; } - if (indexMetadata.getCreationVersion().before(IndexVersions.MERGE_ON_RECOVERY_VERSION)) { + if (indexMetadata.getCreationVersion().before(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { 
return recoveryListener; } From b4e95dc40a1618f522e0cf7a7ff3a4f5a4433ebf Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 18 Feb 2025 18:40:33 +0100 Subject: [PATCH 20/29] Unmute `FrozenSearchableSnapshotsIntegTests#testCreateAndRestorePartialSearchableSnapshot` (#122831) * Unmute `FrozenSearchableSnapshotsIntegTests#testCreateAndRestorePartialSearchableSnapshot` The underlying failure `java.lang.AssertionError: Searchable snapshot directory does not support the operation [createOutput` was fixed in https://github.com/elastic/elasticsearch/pull/122006. The automation bot was too aggressive in re-opening this issue. Resolve #122693 * Add a check for the CLOSED state along with STOPPED * Update x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java Co-authored-by: Tanguy Leroux --------- Co-authored-by: Tanguy Leroux --- muted-tests.yml | 3 --- .../xpack/searchablesnapshots/cache/full/CacheService.java | 4 ++-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 9024bc57acb2..587ca0a097f6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -315,9 +315,6 @@ tests: - class: org.elasticsearch.xpack.autoscaling.storage.ReactiveStorageIT method: testScaleWhileShrinking issue: https://github.com/elastic/elasticsearch/issues/122119 -- class: org.elasticsearch.xpack.searchablesnapshots.FrozenSearchableSnapshotsIntegTests - method: testCreateAndRestorePartialSearchableSnapshot - issue: https://github.com/elastic/elasticsearch/issues/122693 - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT method: testSearchWithRandomDisconnects issue: https://github.com/elastic/elasticsearch/issues/122707 diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java 
b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java index a02c32cd56d9..dee6fc7241d5 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/CacheService.java @@ -245,8 +245,8 @@ public class CacheService extends AbstractLifecycleComponent { final Lifecycle.State state = lifecycleState(); assert state != Lifecycle.State.INITIALIZED : state; if (state != Lifecycle.State.STARTED) { - if (state == Lifecycle.State.STOPPED) { - throw new AlreadyClosedException("Failed to read data from cache: cache service is stopped"); + if (state == Lifecycle.State.STOPPED || state == Lifecycle.State.CLOSED) { + throw new AlreadyClosedException("Failed to read data from cache: cache service is [" + state + ']'); } else { throw new IllegalStateException("Failed to read data from cache: cache service is not started [" + state + "]"); } From b4f84f6f04513e0d53d73eceee6fba3b0c45308d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 18 Feb 2025 19:20:10 +0100 Subject: [PATCH 21/29] Improve Iterators.single (#122875) Using an anonymous class here doesn't compile as expected. The resulting class comes out as: ``` class Iterators$1 implements java.util.Iterator { private T value; final java.lang.Object val$element; Iterators$1(java.lang.Object); ``` which seemingly also does not get fixed by the JIT compiler, judging by heap dumps. Lets just use a named class to clean this up and make things a bit more compact and save some heap as well here and there potentially. 
--- .../common/collect/Iterators.java | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java index 73da3f54fd8d..69b43303df5d 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java +++ b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java @@ -34,22 +34,27 @@ public class Iterators { * Returns a single element iterator over the supplied value. */ public static Iterator single(T element) { - return new Iterator<>() { + return new SingleIterator<>(element); + } - private T value = Objects.requireNonNull(element); + private static final class SingleIterator implements Iterator { + private T value; - @Override - public boolean hasNext() { - return value != null; - } + SingleIterator(T element) { + value = Objects.requireNonNull(element); + } - @Override - public T next() { - final T res = value; - value = null; - return res; - } - }; + @Override + public boolean hasNext() { + return value != null; + } + + @Override + public T next() { + final T res = value; + value = null; + return res; + } } @SafeVarargs @@ -496,5 +501,4 @@ public class Iterators { } return result; } - } From befc6a03e305f13745cb0de47f9c7e1b426494d7 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Tue, 18 Feb 2025 13:33:39 -0500 Subject: [PATCH 22/29] Start the allocation architecture guide section (#121940) This is a high-level overview of the main rebalancing components and how they interact to move shards around the cluster, and decide where shards should go. 
Relates ES-10423 --- docs/internal/DistributedArchitectureGuide.md | 42 +++++++++++++++---- 1 file changed, 34 insertions(+), 8 deletions(-) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 11a2c860eb32..ed1b722dbb39 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -229,19 +229,45 @@ works in parallel with the storage engine.) # Allocation -(AllocationService runs on the master node) +### Core Components -(Discuss different deciders that limit allocation. Sketch / list the different deciders that we have.) +The `DesiredBalanceShardsAllocator` is what runs shard allocation decisions. It leverages the `DesiredBalanceComputer` to produce +`DesiredBalance` instances for the cluster based on the latest cluster changes (add/remove nodes, create/remove indices, load, etc.). Then +the `DesiredBalanceReconciler` is invoked to choose the next steps to take to move the cluster from the current shard allocation to the +latest computed `DesiredBalance` shard allocation. The `DesiredBalanceReconciler` will apply changes to a copy of the `RoutingNodes`, which +is then published in a cluster state update that will reach the data nodes to start the individual shard recovery/deletion/move work. -### APIs for Balancing Operations +The `DesiredBalanceReconciler` is throttled by cluster settings, like the max number of concurrent shard moves and recoveries per cluster +and node: this is why the `DesiredBalanceReconciler` will make, and publish via cluster state updates, incremental changes to the cluster +shard allocation. The `DesiredBalanceShardsAllocator` is the endpoint for reroute requests, which may trigger immediate requests to the +`DesiredBalanceReconciler`, but asynchronous requests to the `DesiredBalanceComputer` via the `ContinuousComputation` component. 
Cluster +state changes that affect shard balancing (for example index deletion) all call some reroute method interface that reaches the +`DesiredBalanceShardsAllocator` to run reconciliation and queue a request for the `DesiredBalanceComputer`, leading to desired balance +computation and reconciliation actions. Asynchronous completion of a new `DesiredBalance` will also invoke a reconciliation action, as will +cluster state updates completing shard moves/recoveries (unthrottling the next shard move/recovery). -(Significant internal APIs for balancing a cluster) +The `ContinuousComputation` saves the latest desired balance computation request, which holds the cluster information at the time of that +request, and a thread that runs the `DesiredBalanceComputer`. The `ContinuousComputation` thread takes the latest request, with the +associated cluster information, feeds it into the `DesiredBalanceComputer` and publishes a `DesiredBalance` back to the +`DesiredBalanceShardsAllocator` to use for reconciliation actions. Sometimes the `ContinuousComputation` thread's desired balance +computation will be signalled to exit early and publish the initial `DesiredBalance` improvements it has made, when newer rebalancing +requests (due to cluster state changes) have arrived, or in order to begin recovery of unassigned shards as quickly as possible. -### Heuristics for Allocation +### Rebalancing Process -### Cluster Reroute Command - -(How does this command behave with the desired auto balancer.) +There are different priorities in shard allocation, reflected in which moves the `DesiredBalanceReconciler` selects to do first given that +it can only move, recover, or remove a limited number of shards at once. The first priority is assigning unassigned shards, primaries being +more important than replicas. The second is to move shards that violate any rule (such as node resource limits) as defined by an +`AllocationDecider`.
The `AllocationDeciders` holds a group of `AllocationDecider` implementations that place hard constraints on shard +allocation. There is a decider, `DiskThresholdDecider`, that manages disk memory usage thresholds, such that further shards may not be +allowed assignment to a node, or shards may be required to move off because they grew to exceed the disk space; or another, +`FilterAllocationDecider`, that excludes a configurable list of indices from certain nodes; or `MaxRetryAllocationDecider` that will not +attempt to recover a shard on a certain node after so many failed retries. The third priority is to rebalance shards to even out the +relative weight of shards on each node: the intention is to avoid, or ease, future hot-spotting on data nodes due to too many shards being +placed on the same data node. Node shard weight is based on a sum of factors: disk memory usage, projected shard write load, total number +of shards, and an incentive to distribute shards within the same index across different nodes. See the `WeightFunction` and +`NodeAllocationStatsAndWeightsCalculator` classes for more details on the weight calculations that support the `DesiredBalanceComputer` +decisions. # Autoscaling From 29cbbd99dde122d89ef3124d8db27e0f3441c2f7 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Tue, 18 Feb 2025 20:05:22 +0100 Subject: [PATCH 23/29] Add more logging for `IndexRecoveryIT#testSourceThrottling` (#122830) I couldn't reproduce the failure of the throttling time being zero on the recovery target node. Added more logging for the stats in case of failure. 
Resolve #122712 --- muted-tests.yml | 3 --- .../indices/recovery/IndexRecoveryIT.java | 12 ++++++++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 587ca0a097f6..4e51e313f39f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -318,9 +318,6 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT method: testSearchWithRandomDisconnects issue: https://github.com/elastic/elasticsearch/issues/122707 -- class: org.elasticsearch.indices.recovery.IndexRecoveryIT - method: testSourceThrottling - issue: https://github.com/elastic/elasticsearch/issues/122712 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/122810 - class: org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 769ffd15d9ba..bc1c34c89700 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -342,9 +342,17 @@ public class IndexRecoveryIT extends AbstractIndexRecoveryIntegTestCase { assertThat(recoveryStats.currentAsSource(), equalTo(0)); assertThat(recoveryStats.currentAsTarget(), equalTo(0)); if (isRecoveryThrottlingNode) { - assertThat("Throttling should be >0 for '" + nodeName + "'", recoveryStats.throttleTime().millis(), greaterThan(0L)); + assertThat( + "Throttling should be >0 for '" + nodeName + "'. Node stats: " + nodesStatsResponse, + recoveryStats.throttleTime().millis(), + greaterThan(0L) + ); } else { - assertThat("Throttling should be =0 for '" + nodeName + "'", recoveryStats.throttleTime().millis(), equalTo(0L)); + assertThat( + "Throttling should be =0 for '" + nodeName + "'. 
Node stats: " + nodesStatsResponse, + recoveryStats.throttleTime().millis(), + equalTo(0L) + ); } } From 80b7879eda255790ea1b5340b404fa6564368cb1 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 18 Feb 2025 20:24:29 +0100 Subject: [PATCH 24/29] Simplify logic around missing shards check in search phases (#122817) This removes a couple of indirections: the error message for missing shards is always the same no matter the search phase. This was required to provide a slightly different error message for open PIT. The previous error was misleading when open PIT did not support setting allow_partial_search_results, but now that it does, it looks like we can unify the error message and simplify the code around it. --- .../search/retriever/RetrieverRewriteIT.java | 5 +---- .../search/CanMatchPreFilterSearchPhase.java | 2 +- .../action/search/SearchPhase.java | 20 +++---------------- .../TransportOpenPointInTimeAction.java | 6 ------ .../jdbc/single_node/JdbcShardFailureIT.java | 2 +- .../qa/single_node/JdbcShardFailureIT.java | 2 +- 6 files changed, 7 insertions(+), 30 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index 25b43a2dc946..bbe27fab2938 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -127,10 +127,7 @@ public class RetrieverRewriteIT extends ESIntegTestCase { SearchPhaseExecutionException.class, client().prepareSearch(testIndex).setSource(source)::get ); - assertThat( - ex.getDetailedMessage(), - containsString("[open_point_in_time] action requires all shards to be available. 
Missing shards") - ); + assertThat(ex.getDetailedMessage(), containsString("Search rejected due to missing shards")); } finally { internalCluster().restartNode(randomDataNode); } diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index f2f5b6df3571..92cadfd1e1a6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -201,7 +201,7 @@ final class CanMatchPreFilterSearchPhase { private void checkNoMissingShards(List shards) { assert assertSearchCoordinationThread(); - SearchPhase.doCheckNoMissingShards("can_match", request, shards, SearchPhase::makeMissingShardsError); + SearchPhase.doCheckNoMissingShards("can_match", request, shards); } private Map> groupByNode(List shards) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java index 1308a2fb61cf..b46937ea975e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhase.java @@ -14,7 +14,6 @@ import org.elasticsearch.transport.Transport; import java.util.List; import java.util.Objects; -import java.util.function.Function; /** * Base class for all individual search phases like collecting distributed frequencies, fetching documents, querying shards. 
@@ -35,26 +34,13 @@ abstract class SearchPhase { return name; } - protected String missingShardsErrorMessage(StringBuilder missingShards) { - return makeMissingShardsError(missingShards); - } - - protected static String makeMissingShardsError(StringBuilder missingShards) { + private static String makeMissingShardsError(StringBuilder missingShards) { return "Search rejected due to missing shards [" + missingShards + "]. Consider using `allow_partial_search_results` setting to bypass this error."; } - protected void doCheckNoMissingShards(String phaseName, SearchRequest request, List shardsIts) { - doCheckNoMissingShards(phaseName, request, shardsIts, this::missingShardsErrorMessage); - } - - protected static void doCheckNoMissingShards( - String phaseName, - SearchRequest request, - List shardsIts, - Function makeErrorMessage - ) { + protected static void doCheckNoMissingShards(String phaseName, SearchRequest request, List shardsIts) { assert request.allowPartialSearchResults() != null : "SearchRequest missing setting for allowPartialSearchResults"; if (request.allowPartialSearchResults() == false) { final StringBuilder missingShards = new StringBuilder(); @@ -70,7 +56,7 @@ abstract class SearchPhase { } if (missingShards.isEmpty() == false) { // Status red - shard is missing all copies and would produce partial results for an index search - final String msg = makeErrorMessage.apply(missingShards); + final String msg = makeMissingShardsError(missingShards); throw new SearchPhaseExecutionException(phaseName, msg, null, ShardSearchFailure.EMPTY_ARRAY); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index b8d0a928e05a..43f989c5efdb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -241,12 +241,6 @@ public class TransportOpenPointInTimeAction extends HandledTransportAction s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")); - assertThat(exception.getMessage(), containsString("[open_point_in_time] action requires all shards to be available")); + assertThat(exception.getMessage(), containsString("Search rejected due to missing shards")); } } } diff --git a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java index f7d08ba4e22d..a554e62d0c6d 100644 --- a/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java +++ b/x-pack/plugin/sql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/single_node/JdbcShardFailureIT.java @@ -89,7 +89,7 @@ public class JdbcShardFailureIT extends JdbcIntegrationTestCase { createTestIndex(); try (Connection c = esJdbc(); Statement s = c.createStatement()) { SQLException exception = expectThrows(SQLException.class, () -> s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")); - assertThat(exception.getMessage(), containsString("[open_point_in_time] action requires all shards to be available")); + assertThat(exception.getMessage(), containsString("Search rejected due to missing shards")); } } From 2ae80c799daac539f46008f8cf70be18a328bf67 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Tue, 18 Feb 2025 12:38:00 -0700 Subject: [PATCH 25/29] Allow setting the `type` in the reroute processor (#122409) * Allow setting the `type` in the reroute processor This allows configuring the `type` from within the ingest `reroute` processor. 
Similar to `dataset` and `namespace`, the type defaults to the value extracted from the index name. This means that documents sent to `logs-mysql.access.default` will have a default value of `logs` for the type. Resolves #121553 * Update docs/changelog/122409.yaml --- docs/changelog/122409.yaml | 6 ++ .../ingest/processors/reroute.asciidoc | 3 + .../ingest/common/RerouteProcessor.java | 34 +++++++-- .../common/RerouteProcessorFactoryTests.java | 2 +- .../ingest/common/RerouteProcessorTests.java | 71 +++++++++++++------ 5 files changed, 88 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/122409.yaml diff --git a/docs/changelog/122409.yaml b/docs/changelog/122409.yaml new file mode 100644 index 000000000000..d54be44f02f8 --- /dev/null +++ b/docs/changelog/122409.yaml @@ -0,0 +1,6 @@ +pr: 122409 +summary: Allow setting the `type` in the reroute processor +area: Ingest Node +type: enhancement +issues: + - 121553 diff --git a/docs/reference/ingest/processors/reroute.asciidoc b/docs/reference/ingest/processors/reroute.asciidoc index 482ff3b1cc11..03fc62dc0b13 100644 --- a/docs/reference/ingest/processors/reroute.asciidoc +++ b/docs/reference/ingest/processors/reroute.asciidoc @@ -45,6 +45,9 @@ Otherwise, the document will be rejected with a security exception which looks l |====== | Name | Required | Default | Description | `destination` | no | - | A static value for the target. Can't be set when the `dataset` or `namespace` option is set. +| `type` | no | `{{data_stream.type}}` a| Field references or a static value for the type part of the data stream name. In addition to the criteria for <>, cannot contain `-` and must be no longer than 100 characters. Example values are `logs` and `metrics`. + +Supports field references with a mustache-like syntax (denoted as `{{double}}` or `{{{triple}}}` curly braces). When resolving field references, the processor replaces invalid characters with `_`. 
Uses the `` part of the index name as a fallback if all field references resolve to a `null`, missing, or non-string value. | `dataset` | no | `{{data_stream.dataset}}` a| Field references or a static value for the dataset part of the data stream name. In addition to the criteria for <>, cannot contain `-` and must be no longer than 100 characters. Example values are `nginx.access` and `nginx.error`. Supports field references with a mustache-like syntax (denoted as `{{double}}` or `{{{triple}}}` curly braces). When resolving field references, the processor replaces invalid characters with `_`. Uses the `` part of the index name as a fallback if all field references resolve to a `null`, missing, or non-string value. diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RerouteProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RerouteProcessor.java index 52e0aae1116c..57479749e179 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RerouteProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RerouteProcessor.java @@ -26,6 +26,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; import static org.elasticsearch.ingest.common.RerouteProcessor.DataStreamValueSource.DATASET_VALUE_SOURCE; import static org.elasticsearch.ingest.common.RerouteProcessor.DataStreamValueSource.NAMESPACE_VALUE_SOURCE; +import static org.elasticsearch.ingest.common.RerouteProcessor.DataStreamValueSource.TYPE_VALUE_SOURCE; public final class RerouteProcessor extends AbstractProcessor { @@ -39,6 +40,7 @@ public final class RerouteProcessor extends AbstractProcessor { private static final String DATA_STREAM_DATASET = DATA_STREAM_PREFIX + "dataset"; private static final String DATA_STREAM_NAMESPACE = DATA_STREAM_PREFIX + "namespace"; private static final String EVENT_DATASET = 
"event.dataset"; + private final List type; private final List dataset; private final List namespace; private final String destination; @@ -46,11 +48,17 @@ public final class RerouteProcessor extends AbstractProcessor { RerouteProcessor( String tag, String description, + List type, List dataset, List namespace, String destination ) { super(tag, description); + if (type.isEmpty()) { + this.type = List.of(TYPE_VALUE_SOURCE); + } else { + this.type = type; + } if (dataset.isEmpty()) { this.dataset = List.of(DATASET_VALUE_SOURCE); } else { @@ -71,7 +79,7 @@ public final class RerouteProcessor extends AbstractProcessor { return ingestDocument; } final String indexName = ingestDocument.getFieldValue(IngestDocument.Metadata.INDEX.getFieldName(), String.class); - final String type; + final String currentType; final String currentDataset; final String currentNamespace; @@ -84,10 +92,11 @@ public final class RerouteProcessor extends AbstractProcessor { if (indexOfSecondDash < 0) { throw new IllegalArgumentException(format(NAMING_SCHEME_ERROR_MESSAGE, indexName)); } - type = parseDataStreamType(indexName, indexOfFirstDash); + currentType = parseDataStreamType(indexName, indexOfFirstDash); currentDataset = parseDataStreamDataset(indexName, indexOfFirstDash, indexOfSecondDash); currentNamespace = parseDataStreamNamespace(indexName, indexOfSecondDash); + String type = determineDataStreamField(ingestDocument, this.type, currentType); String dataset = determineDataStreamField(ingestDocument, this.dataset, currentDataset); String namespace = determineDataStreamField(ingestDocument, this.namespace, currentNamespace); String newTarget = type + "-" + dataset + "-" + namespace; @@ -168,6 +177,15 @@ public final class RerouteProcessor extends AbstractProcessor { String description, Map config ) throws Exception { + List type; + try { + type = ConfigurationUtils.readOptionalListOrString(TYPE, tag, config, "type") + .stream() + .map(DataStreamValueSource::type) + .toList(); + } catch 
(IllegalArgumentException e) { + throw newConfigurationException(TYPE, tag, "type", e.getMessage()); + } List dataset; try { dataset = ConfigurationUtils.readOptionalListOrString(TYPE, tag, config, "dataset") @@ -188,11 +206,11 @@ public final class RerouteProcessor extends AbstractProcessor { } String destination = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "destination"); - if (destination != null && (dataset.isEmpty() == false || namespace.isEmpty() == false)) { - throw newConfigurationException(TYPE, tag, "destination", "can only be set if dataset and namespace are not set"); + if (destination != null && (type.isEmpty() == false || dataset.isEmpty() == false || namespace.isEmpty() == false)) { + throw newConfigurationException(TYPE, tag, "destination", "can only be set if type, dataset, and namespace are not set"); } - return new RerouteProcessor(tag, description, dataset, namespace, destination); + return new RerouteProcessor(tag, description, type, dataset, namespace, destination); } } @@ -203,8 +221,10 @@ public final class RerouteProcessor extends AbstractProcessor { private static final int MAX_LENGTH = 100; private static final String REPLACEMENT = "_"; + private static final Pattern DISALLOWED_IN_TYPE = Pattern.compile("[\\\\/*?\"<>| ,#:-]"); private static final Pattern DISALLOWED_IN_DATASET = Pattern.compile("[\\\\/*?\"<>| ,#:-]"); private static final Pattern DISALLOWED_IN_NAMESPACE = Pattern.compile("[\\\\/*?\"<>| ,#:]"); + static final DataStreamValueSource TYPE_VALUE_SOURCE = type("{{" + DATA_STREAM_TYPE + "}}"); static final DataStreamValueSource DATASET_VALUE_SOURCE = dataset("{{" + DATA_STREAM_DATASET + "}}"); static final DataStreamValueSource NAMESPACE_VALUE_SOURCE = namespace("{{" + DATA_STREAM_NAMESPACE + "}}"); @@ -212,6 +232,10 @@ public final class RerouteProcessor extends AbstractProcessor { private final String fieldReference; private final Function sanitizer; + public static DataStreamValueSource type(String type) 
{ + return new DataStreamValueSource(type, ds -> sanitizeDataStreamField(ds, DISALLOWED_IN_TYPE)); + } + public static DataStreamValueSource dataset(String dataset) { return new DataStreamValueSource(dataset, ds -> sanitizeDataStreamField(ds, DISALLOWED_IN_DATASET)); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorFactoryTests.java index 07e8ba405736..e83093809cf0 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorFactoryTests.java @@ -47,7 +47,7 @@ public class RerouteProcessorFactoryTests extends ESTestCase { ElasticsearchParseException.class, () -> create(Map.of("destination", "foo", "dataset", "bar")) ); - assertThat(e.getMessage(), equalTo("[destination] can only be set if dataset and namespace are not set")); + assertThat(e.getMessage(), equalTo("[destination] can only be set if type, dataset, and namespace are not set")); } public void testFieldReference() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorTests.java index 084be1c2b227..55eebbc16048 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RerouteProcessorTests.java @@ -27,16 +27,25 @@ public class RerouteProcessorTests extends ESTestCase { public void testDefaults() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - RerouteProcessor processor = createRerouteProcessor(List.of(), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), 
List.of(), List.of()); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "generic", "default"); } + public void testRouteOnType() throws Exception { + IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); + ingestDocument.setFieldValue("event.type", "foo"); + + RerouteProcessor processor = createRerouteProcessor(List.of("{{event.type}}"), List.of(), List.of()); + processor.execute(ingestDocument); + assertDataSetFields(ingestDocument, "foo", "generic", "default"); + } + public void testEventDataset() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); ingestDocument.setFieldValue("event.dataset", "foo"); - RerouteProcessor processor = createRerouteProcessor(List.of("{{event.dataset }}"), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of("{{event.dataset }}"), List.of()); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "foo", "default"); assertThat(ingestDocument.getFieldValue("event.dataset", String.class), equalTo("foo")); @@ -46,7 +55,7 @@ public class RerouteProcessorTests extends ESTestCase { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); ingestDocument.getCtxMap().put("event.dataset", "foo"); - RerouteProcessor processor = createRerouteProcessor(List.of("{{ event.dataset}}"), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of("{{ event.dataset}}"), List.of()); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "foo", "default"); assertThat(ingestDocument.getCtxMap().get("event.dataset"), equalTo("foo")); @@ -57,7 +66,7 @@ public class RerouteProcessorTests extends ESTestCase { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); ingestDocument.setFieldValue("ds", "foo"); - RerouteProcessor processor = createRerouteProcessor(List.of("{{ ds }}"), List.of()); + RerouteProcessor 
processor = createRerouteProcessor(List.of(), List.of("{{ ds }}"), List.of()); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "foo", "default"); assertFalse(ingestDocument.hasField("event.dataset")); @@ -66,8 +75,8 @@ public class RerouteProcessorTests extends ESTestCase { public void testSkipFirstProcessor() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - RerouteProcessor skippedProcessor = createRerouteProcessor(List.of("skip"), List.of()); - RerouteProcessor executedProcessor = createRerouteProcessor(List.of("executed"), List.of()); + RerouteProcessor skippedProcessor = createRerouteProcessor(List.of(), List.of("skip"), List.of()); + RerouteProcessor executedProcessor = createRerouteProcessor(List.of(), List.of("executed"), List.of()); CompoundProcessor processor = new CompoundProcessor(new SkipProcessor(skippedProcessor), executedProcessor); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "executed", "default"); @@ -76,8 +85,8 @@ public class RerouteProcessorTests extends ESTestCase { public void testSkipLastProcessor() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - RerouteProcessor executedProcessor = createRerouteProcessor(List.of("executed"), List.of()); - RerouteProcessor skippedProcessor = createRerouteProcessor(List.of("skip"), List.of()); + RerouteProcessor executedProcessor = createRerouteProcessor(List.of(), List.of("executed"), List.of()); + RerouteProcessor skippedProcessor = createRerouteProcessor(List.of(), List.of("skip"), List.of()); CompoundProcessor processor = new CompoundProcessor(executedProcessor, skippedProcessor); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "executed", "default"); @@ -85,23 +94,24 @@ public class RerouteProcessorTests extends ESTestCase { public void testDataStreamFieldsFromDocument() throws Exception { IngestDocument 
ingestDocument = createIngestDocument("logs-generic-default"); + ingestDocument.setFieldValue("data_stream.type", "eggplant"); ingestDocument.setFieldValue("data_stream.dataset", "foo"); ingestDocument.setFieldValue("data_stream.namespace", "bar"); - RerouteProcessor processor = createRerouteProcessor(List.of(), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of(), List.of()); processor.execute(ingestDocument); - assertDataSetFields(ingestDocument, "logs", "foo", "bar"); + assertDataSetFields(ingestDocument, "eggplant", "foo", "bar"); } public void testDataStreamFieldsFromDocumentDottedNotation() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - ingestDocument.getCtxMap().put("data_stream.type", "logs"); + ingestDocument.getCtxMap().put("data_stream.type", "eggplant"); ingestDocument.getCtxMap().put("data_stream.dataset", "foo"); ingestDocument.getCtxMap().put("data_stream.namespace", "bar"); - RerouteProcessor processor = createRerouteProcessor(List.of(), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of(), List.of()); processor.execute(ingestDocument); - assertDataSetFields(ingestDocument, "logs", "foo", "bar"); + assertDataSetFields(ingestDocument, "eggplant", "foo", "bar"); } public void testInvalidDataStreamFieldsFromDocument() throws Exception { @@ -109,7 +119,7 @@ public class RerouteProcessorTests extends ESTestCase { ingestDocument.setFieldValue("data_stream.dataset", "foo-bar"); ingestDocument.setFieldValue("data_stream.namespace", "baz#qux"); - RerouteProcessor processor = createRerouteProcessor(List.of(), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of(), List.of()); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "foo_bar", "baz_qux"); } @@ -128,7 +138,7 @@ public class RerouteProcessorTests extends ESTestCase { ingestDocument.setFieldValue("service.name", 
"opbeans-java"); ingestDocument.setFieldValue("service.environment", "dev"); - RerouteProcessor processor = createRerouteProcessor(List.of("{{service.name}}"), List.of("{{service.environment}}")); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of("{{service.name}}"), List.of("{{service.environment}}")); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "opbeans_java", "dev"); } @@ -136,7 +146,7 @@ public class RerouteProcessorTests extends ESTestCase { public void testRerouteToCurrentTarget() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - RerouteProcessor reroute = createRerouteProcessor(List.of("generic"), List.of("default")); + RerouteProcessor reroute = createRerouteProcessor(List.of(), List.of("generic"), List.of("default")); CompoundProcessor processor = new CompoundProcessor( reroute, new TestProcessor(doc -> doc.setFieldValue("pipeline_is_continued", true)) @@ -149,7 +159,7 @@ public class RerouteProcessorTests extends ESTestCase { public void testFieldReferenceWithMissingReroutesToCurrentTarget() throws Exception { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); - RerouteProcessor reroute = createRerouteProcessor(List.of("{{service.name}}"), List.of("{{service.environment}}")); + RerouteProcessor reroute = createRerouteProcessor(List.of(), List.of("{{service.name}}"), List.of("{{service.environment}}")); CompoundProcessor processor = new CompoundProcessor( reroute, new TestProcessor(doc -> doc.setFieldValue("pipeline_is_continued", true)) @@ -166,6 +176,7 @@ public class RerouteProcessorTests extends ESTestCase { ingestDocument.setFieldValue("data_stream.namespace", "namespace_from_doc"); RerouteProcessor processor = createRerouteProcessor( + List.of(), List.of("{{{data_stream.dataset}}}", "fallback"), List.of("{{data_stream.namespace}}", "fallback") ); @@ -177,6 +188,7 @@ public class RerouteProcessorTests extends ESTestCase 
{ IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); RerouteProcessor processor = createRerouteProcessor( + List.of(), List.of("{{data_stream.dataset}}", "fallback"), List.of("{{data_stream.namespace}}", "fallback") ); @@ -190,6 +202,7 @@ public class RerouteProcessorTests extends ESTestCase { ingestDocument.setFieldValue("data_stream.namespace", "default"); RerouteProcessor processor = createRerouteProcessor( + List.of(), List.of("{{data_stream.dataset}}", "fallback"), List.of("{{{data_stream.namespace}}}", "fallback") ); @@ -202,7 +215,7 @@ public class RerouteProcessorTests extends ESTestCase { ingestDocument.setFieldValue("data_stream.dataset", "foo"); ingestDocument.setFieldValue("data_stream.namespace", "bar"); - RerouteProcessor processor = createRerouteProcessor(List.of("{{foo}}"), List.of("{{bar}}")); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of("{{foo}}"), List.of("{{bar}}")); processor.execute(ingestDocument); assertDataSetFields(ingestDocument, "logs", "generic", "default"); } @@ -210,7 +223,7 @@ public class RerouteProcessorTests extends ESTestCase { public void testInvalidDataStreamName() throws Exception { { IngestDocument ingestDocument = createIngestDocument("foo"); - RerouteProcessor processor = createRerouteProcessor(List.of(), List.of()); + RerouteProcessor processor = createRerouteProcessor(List.of(), List.of(), List.of()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("invalid data stream name: [foo]; must follow naming scheme --")); } @@ -227,11 +240,16 @@ public class RerouteProcessorTests extends ESTestCase { public void testRouteOnNonStringFieldFails() { IngestDocument ingestDocument = createIngestDocument("logs-generic-default"); ingestDocument.setFieldValue("numeric_field", 42); - RerouteProcessor processor = createRerouteProcessor(List.of("{{numeric_field}}"), List.of()); + 
RerouteProcessor processor = createRerouteProcessor(List.of(), List.of("{{numeric_field}}"), List.of()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("field [numeric_field] of type [java.lang.Integer] cannot be cast to [java.lang.String]")); } + public void testTypeSanitization() { + assertTypeSanitization("\\/*?\"<>| ,#:-", "_____________"); + assertTypeSanitization("foo*bar", "foo_bar"); + } + public void testDatasetSanitization() { assertDatasetSanitization("\\/*?\"<>| ,#:-", "_____________"); assertDatasetSanitization("foo*bar", "foo_bar"); @@ -242,6 +260,14 @@ public class RerouteProcessorTests extends ESTestCase { assertNamespaceSanitization("foo*bar", "foo_bar"); } + private static void assertTypeSanitization(String type, String sanitizedType) { + assertThat( + RerouteProcessor.DataStreamValueSource.type("{{foo}}") + .resolve(RandomDocumentPicks.randomIngestDocument(random(), Map.of("foo", type))), + equalTo(sanitizedType) + ); + } + private static void assertDatasetSanitization(String dataset, String sanitizedDataset) { assertThat( RerouteProcessor.DataStreamValueSource.dataset("{{foo}}") @@ -258,10 +284,11 @@ public class RerouteProcessorTests extends ESTestCase { ); } - private RerouteProcessor createRerouteProcessor(List dataset, List namespace) { + private RerouteProcessor createRerouteProcessor(List type, List dataset, List namespace) { return new RerouteProcessor( null, null, + type.stream().map(RerouteProcessor.DataStreamValueSource::type).toList(), dataset.stream().map(RerouteProcessor.DataStreamValueSource::dataset).toList(), namespace.stream().map(RerouteProcessor.DataStreamValueSource::namespace).toList(), null @@ -269,7 +296,7 @@ public class RerouteProcessorTests extends ESTestCase { } private RerouteProcessor createRerouteProcessor(String destination) { - return new RerouteProcessor(null, null, List.of(), List.of(), destination); + return 
new RerouteProcessor(null, null, List.of(), List.of(), List.of(), destination); } private void assertDataSetFields(IngestDocument ingestDocument, String type, String dataset, String namespace) { From d30b3ca3f4292ca3e2980c74e0f38d0e991f8273 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 18 Feb 2025 12:31:29 -0800 Subject: [PATCH 26/29] Prepare for 9.0.0-rc1 --- .buildkite/scripts/dra-update-staging.sh | 2 +- .buildkite/scripts/dra-workflow.trigger.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.buildkite/scripts/dra-update-staging.sh b/.buildkite/scripts/dra-update-staging.sh index e168dbf733ea..2e2462ccc215 100755 --- a/.buildkite/scripts/dra-update-staging.sh +++ b/.buildkite/scripts/dra-update-staging.sh @@ -37,7 +37,7 @@ for BRANCH in "${BRANCHES[@]}"; do if [[ "$SHOULD_TRIGGER" == "true" ]]; then if [[ "$BRANCH" == "9.0" ]]; then - export VERSION_QUALIFIER="beta1" + export VERSION_QUALIFIER="rc1" fi echo "Triggering DRA staging workflow for $BRANCH" cat << EOF | buildkite-agent pipeline upload diff --git a/.buildkite/scripts/dra-workflow.trigger.sh b/.buildkite/scripts/dra-workflow.trigger.sh index 71998b06bd3d..e54348a81ee0 100755 --- a/.buildkite/scripts/dra-workflow.trigger.sh +++ b/.buildkite/scripts/dra-workflow.trigger.sh @@ -8,7 +8,7 @@ source .buildkite/scripts/branches.sh for BRANCH in "${BRANCHES[@]}"; do if [[ "$BRANCH" == "9.0" ]]; then - export VERSION_QUALIFIER="beta1" + export VERSION_QUALIFIER="rc1" fi INTAKE_PIPELINE_SLUG="elasticsearch-intake" From 9dedcf7ebc3ee5e9e4411b54d672c60c72e01386 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 18 Feb 2025 13:41:45 -0800 Subject: [PATCH 27/29] Re-enable shard rebalance in ES|QL rest tests (#122889) Today, we disable shard rebalancing in ES|QL REST tests to avoid failures when shards are rebalanced while the query is being executed. This is no longer necessary if the clusters are on version 8.19 or 9.1. 
--- .../xpack/esql/qa/mixed/Clusters.java | 15 ++++++--- .../xpack/esql/ccq/Clusters.java | 31 +++++++++++++------ 2 files changed, 32 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/Clusters.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/Clusters.java index 8a55624ed3a6..d91e092e312e 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/Clusters.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/Clusters.java @@ -14,15 +14,22 @@ import org.elasticsearch.test.cluster.util.Version; public class Clusters { public static ElasticsearchCluster mixedVersionCluster() { Version oldVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); - return ElasticsearchCluster.local() + var cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .withNode(node -> node.version(oldVersion)) .withNode(node -> node.version(Version.CURRENT)) .withNode(node -> node.version(oldVersion)) .withNode(node -> node.version(Version.CURRENT)) .setting("xpack.security.enabled", "false") - .setting("xpack.license.self_generated.type", "trial") - .setting("cluster.routing.rebalance.enable", "none") // disable relocation until we have retry in ESQL - .build(); + .setting("xpack.license.self_generated.type", "trial"); + if (supportRetryOnShardFailures(oldVersion) == false) { + cluster.setting("cluster.routing.rebalance.enable", "none"); + } + return cluster.build(); + } + + private static boolean supportRetryOnShardFailures(Version version) { + return version.onOrAfter(Version.fromString("9.1.0")) + || (version.onOrAfter(Version.fromString("8.19.0")) && version.before(Version.fromString("9.0.0"))); } } diff --git 
a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java index 6a1b9f9d051e..26e6ec81584c 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/Clusters.java @@ -17,17 +17,20 @@ public class Clusters { static final String LOCAL_CLUSTER_NAME = "local_cluster"; public static ElasticsearchCluster remoteCluster() { - return ElasticsearchCluster.local() + Version version = distributionVersion("tests.version.remote_cluster"); + var cluster = ElasticsearchCluster.local() .name(REMOTE_CLUSTER_NAME) .distribution(DistributionType.DEFAULT) - .version(distributionVersion("tests.version.remote_cluster")) + .version(version) .nodes(2) .setting("node.roles", "[data,ingest,master]") .setting("xpack.security.enabled", "false") .setting("xpack.license.self_generated.type", "trial") - .shared(true) - .setting("cluster.routing.rebalance.enable", "none") - .build(); + .shared(true); + if (supportRetryOnShardFailures(version) == false) { + cluster.setting("cluster.routing.rebalance.enable", "none"); + } + return cluster.build(); } public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteCluster) { @@ -35,10 +38,11 @@ public class Clusters { } public static ElasticsearchCluster localCluster(ElasticsearchCluster remoteCluster, Boolean skipUnavailable) { - return ElasticsearchCluster.local() + Version version = distributionVersion("tests.version.local_cluster"); + var cluster = ElasticsearchCluster.local() .name(LOCAL_CLUSTER_NAME) .distribution(DistributionType.DEFAULT) - .version(distributionVersion("tests.version.local_cluster")) + .version(version) .nodes(2) .setting("xpack.security.enabled", "false") 
.setting("xpack.license.self_generated.type", "trial") @@ -46,9 +50,11 @@ public class Clusters { .setting("cluster.remote.remote_cluster.seeds", () -> "\"" + remoteCluster.getTransportEndpoint(0) + "\"") .setting("cluster.remote.connections_per_cluster", "1") .setting("cluster.remote." + REMOTE_CLUSTER_NAME + ".skip_unavailable", skipUnavailable.toString()) - .shared(true) - .setting("cluster.routing.rebalance.enable", "none") - .build(); + .shared(true); + if (supportRetryOnShardFailures(version) == false) { + cluster.setting("cluster.routing.rebalance.enable", "none"); + } + return cluster.build(); } public static org.elasticsearch.Version localClusterVersion() { @@ -65,4 +71,9 @@ public class Clusters { final String val = System.getProperty(key); return val != null ? Version.fromString(val) : Version.CURRENT; } + + private static boolean supportRetryOnShardFailures(Version version) { + return version.onOrAfter(Version.fromString("9.1.0")) + || (version.onOrAfter(Version.fromString("8.19.0")) && version.before(Version.fromString("9.0.0"))); + } } From cffbccbb498482536eaca3a964c14fcd98423b8a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 18 Feb 2025 14:13:55 -0800 Subject: [PATCH 28/29] Add support for setting based file entitlements (#122656) With Security Manager we have SecuredConfigFileSettingAccessPermission. This commit adds an entitlement equivalent. With each entry in files entitlement, a `path_setting` can now be used. The value may be an explicit setting, or a setting glob with a single `*`. 
relates ES-10844 --- .../bootstrap/EntitlementBootstrap.java | 27 ++++- .../EntitlementInitialization.java | 10 +- .../runtime/policy/FileAccessTree.java | 5 +- .../runtime/policy/PathLookup.java | 11 +- .../policy/entitlements/FilesEntitlement.java | 110 ++++++++++++++---- .../runtime/policy/FileAccessTreeTests.java | 17 +-- .../runtime/policy/PolicyManagerTests.java | 5 +- .../policy/PolicyParserFailureTests.java | 6 +- .../runtime/policy/PolicyParserTests.java | 9 +- .../entitlements/FilesEntitlementTests.java | 50 +++++++- .../bootstrap/Elasticsearch.java | 6 +- .../common/settings/Settings.java | 18 +++ .../common/settings/SettingsTests.java | 6 + 13 files changed, 230 insertions(+), 50 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 364c81bf2d26..17885fb10307 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -28,6 +28,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; import java.util.function.Function; +import java.util.stream.Stream; import static java.util.Objects.requireNonNull; @@ -36,19 +37,24 @@ public class EntitlementBootstrap { public record BootstrapArgs( Map pluginPolicies, Function, String> pluginResolver, + Function settingResolver, + Function> settingGlobResolver, Path[] dataDirs, Path configDir, - Path tempDir, - Path logsDir + Path logsDir, + Path tempDir ) { public BootstrapArgs { requireNonNull(pluginPolicies); requireNonNull(pluginResolver); + requireNonNull(settingResolver); + requireNonNull(settingGlobResolver); requireNonNull(dataDirs); if (dataDirs.length == 0) { throw new IllegalArgumentException("must provide at least one data directory"); } 
requireNonNull(configDir); + requireNonNull(logsDir); requireNonNull(tempDir); } } @@ -73,16 +79,27 @@ public class EntitlementBootstrap { public static void bootstrap( Map pluginPolicies, Function, String> pluginResolver, + Function settingResolver, + Function> settingGlobResolver, Path[] dataDirs, Path configDir, - Path tempDir, - Path logsDir + Path logsDir, + Path tempDir ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { throw new IllegalStateException("plugin data is already set"); } - EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(pluginPolicies, pluginResolver, dataDirs, configDir, tempDir, logsDir); + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs( + pluginPolicies, + pluginResolver, + settingResolver, + settingGlobResolver, + dataDirs, + configDir, + logsDir, + tempDir + ); exportInitializationToAgent(); loadAgent(findAgentJar()); selfTest(); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index aee731cdd762..0e501b7107a7 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -135,8 +135,14 @@ public class EntitlementInitialization { private static PolicyManager createPolicyManager() { EntitlementBootstrap.BootstrapArgs bootstrapArgs = EntitlementBootstrap.bootstrapArgs(); Map pluginPolicies = bootstrapArgs.pluginPolicies(); - var pathLookup = new PathLookup(getUserHome(), bootstrapArgs.configDir(), bootstrapArgs.dataDirs(), bootstrapArgs.tempDir()); - Path logsDir = EntitlementBootstrap.bootstrapArgs().logsDir(); + var pathLookup = new PathLookup( + getUserHome(), + bootstrapArgs.configDir(), + bootstrapArgs.dataDirs(), + 
bootstrapArgs.tempDir(), + bootstrapArgs.settingResolver(), + bootstrapArgs.settingGlobResolver() + ); List serverScopes = new ArrayList<>(); Collections.addAll( diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index 46ee46c7b30c..98076af51ae6 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -42,8 +42,9 @@ public final class FileAccessTree { } // everything has access to the temp dir - readPaths.add(pathLookup.tempDir().toString()); - writePaths.add(pathLookup.tempDir().toString()); + String tempDir = normalizePath(pathLookup.tempDir()); + readPaths.add(tempDir); + writePaths.add(tempDir); readPaths.sort(String::compareTo); writePaths.sort(String::compareTo); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java index 5790e7245aad..6f6220e21546 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PathLookup.java @@ -10,5 +10,14 @@ package org.elasticsearch.entitlement.runtime.policy; import java.nio.file.Path; +import java.util.function.Function; +import java.util.stream.Stream; -public record PathLookup(Path homeDir, Path configDir, Path[] dataDirs, Path tempDir) {} +public record PathLookup( + Path homeDir, + Path configDir, + Path[] dataDirs, + Path tempDir, + Function settingResolver, + Function> settingGlobResolver +) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java 
b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java index 3859de2dc2f9..21b54ba51ca8 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlement.java @@ -15,7 +15,6 @@ import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -53,6 +52,43 @@ public record FilesEntitlement(List filesData) implements Entitlement static FileData ofRelativePath(Path relativePath, BaseDir baseDir, Mode mode) { return new RelativePathFileData(relativePath, baseDir, mode); } + + static FileData ofPathSetting(String setting, Mode mode) { + return new PathSettingFileData(setting, mode); + } + + static FileData ofRelativePathSetting(String setting, BaseDir baseDir, Mode mode) { + return new RelativePathSettingFileData(setting, baseDir, mode); + } + } + + private sealed interface RelativeFileData extends FileData { + BaseDir baseDir(); + + Stream resolveRelativePaths(PathLookup pathLookup); + + @Override + default Stream resolvePaths(PathLookup pathLookup) { + Objects.requireNonNull(pathLookup); + var relativePaths = resolveRelativePaths(pathLookup); + switch (baseDir()) { + case CONFIG: + return relativePaths.map(relativePath -> pathLookup.configDir().resolve(relativePath)); + case DATA: + // multiple data dirs are a pain...we need the combination of relative paths and data dirs + List paths = new ArrayList<>(); + for (var relativePath : relativePaths.toList()) { + for (var dataDir : pathLookup.dataDirs()) { + paths.add(dataDir.resolve(relativePath)); + } + } + return paths.stream(); + case HOME: + return relativePaths.map(relativePath -> pathLookup.homeDir().resolve(relativePath)); + 
default: + throw new IllegalArgumentException(); + } + } } private record AbsolutePathFileData(Path path, Mode mode) implements FileData { @@ -62,24 +98,35 @@ public record FilesEntitlement(List filesData) implements Entitlement } } - private record RelativePathFileData(Path relativePath, BaseDir baseDir, Mode mode) implements FileData { + private record RelativePathFileData(Path relativePath, BaseDir baseDir, Mode mode) implements FileData, RelativeFileData { + @Override + public Stream resolveRelativePaths(PathLookup pathLookup) { + return Stream.of(relativePath); + } + } + private record PathSettingFileData(String setting, Mode mode) implements FileData { @Override public Stream resolvePaths(PathLookup pathLookup) { - Objects.requireNonNull(pathLookup); - switch (baseDir) { - case CONFIG: - return Stream.of(pathLookup.configDir().resolve(relativePath)); - case DATA: - return Arrays.stream(pathLookup.dataDirs()).map(d -> d.resolve(relativePath)); - case HOME: - return Stream.of(pathLookup.homeDir().resolve(relativePath)); - default: - throw new IllegalArgumentException(); - } + return resolvePathSettings(pathLookup, setting); } } + private record RelativePathSettingFileData(String setting, BaseDir baseDir, Mode mode) implements FileData, RelativeFileData { + @Override + public Stream resolveRelativePaths(PathLookup pathLookup) { + return resolvePathSettings(pathLookup, setting); + } + } + + private static Stream resolvePathSettings(PathLookup pathLookup, String setting) { + if (setting.contains("*")) { + return pathLookup.settingGlobResolver().apply(setting).map(Path::of); + } + String path = pathLookup.settingResolver().apply(setting); + return path == null ? 
Stream.of() : Stream.of(Path.of(path)); + } + private static Mode parseMode(String mode) { if (mode.equals("read")) { return Mode.READ; @@ -113,37 +160,56 @@ public record FilesEntitlement(List filesData) implements Entitlement String pathAsString = file.remove("path"); String relativePathAsString = file.remove("relative_path"); String relativeTo = file.remove("relative_to"); - String mode = file.remove("mode"); + String pathSetting = file.remove("path_setting"); + String relativePathSetting = file.remove("relative_path_setting"); + String modeAsString = file.remove("mode"); if (file.isEmpty() == false) { throw new PolicyValidationException("unknown key(s) [" + file + "] in a listed file for files entitlement"); } - if (mode == null) { + int foundKeys = (pathAsString != null ? 1 : 0) + (relativePathAsString != null ? 1 : 0) + (pathSetting != null ? 1 : 0) + + (relativePathSetting != null ? 1 : 0); + if (foundKeys != 1) { + throw new PolicyValidationException( + "a files entitlement entry must contain one of " + "[path, relative_path, path_setting, relative_path_setting]" + ); + } + + if (modeAsString == null) { throw new PolicyValidationException("files entitlement must contain 'mode' for every listed file"); } - if (pathAsString != null && relativePathAsString != null) { - throw new PolicyValidationException("a files entitlement entry cannot contain both 'path' and 'relative_path'"); + Mode mode = parseMode(modeAsString); + + BaseDir baseDir = null; + if (relativeTo != null) { + baseDir = parseBaseDir(relativeTo); } if (relativePathAsString != null) { - if (relativeTo == null) { + if (baseDir == null) { throw new PolicyValidationException("files entitlement with a 'relative_path' must specify 'relative_to'"); } - final BaseDir baseDir = parseBaseDir(relativeTo); Path relativePath = Path.of(relativePathAsString); if (relativePath.isAbsolute()) { throw new PolicyValidationException("'relative_path' [" + relativePathAsString + "] must be relative"); } - 
filesData.add(FileData.ofRelativePath(relativePath, baseDir, parseMode(mode))); + filesData.add(FileData.ofRelativePath(relativePath, baseDir, mode)); } else if (pathAsString != null) { Path path = Path.of(pathAsString); if (path.isAbsolute() == false) { throw new PolicyValidationException("'path' [" + pathAsString + "] must be absolute"); } - filesData.add(FileData.ofPath(path, parseMode(mode))); + filesData.add(FileData.ofPath(path, mode)); + } else if (pathSetting != null) { + filesData.add(FileData.ofPathSetting(pathSetting, mode)); + } else if (relativePathSetting != null) { + if (baseDir == null) { + throw new PolicyValidationException("files entitlement with a 'relative_path_setting' must specify 'relative_to'"); + } + filesData.add(FileData.ofRelativePathSetting(relativePathSetting, baseDir, mode)); } else { - throw new PolicyValidationException("files entitlement must contain either 'path' or 'relative_path' for every entry"); + throw new AssertionError("File entry validation error"); } } return new FilesEntitlement(filesData); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index 37b2bfb19f81..218fc0c95672 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -25,10 +26,12 @@ import static org.hamcrest.Matchers.is; public class FileAccessTreeTests extends ESTestCase { static Path root; + static Settings settings; @BeforeClass public static void 
setupRoot() { root = createTempDir(); + settings = Settings.EMPTY; } private static Path path(String s) { @@ -39,7 +42,9 @@ public class FileAccessTreeTests extends ESTestCase { Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1"), Path.of("/data2") }, - Path.of("/tmp") + Path.of("/tmp"), + setting -> settings.get(setting), + glob -> settings.getGlobValues(glob) ); public void testEmpty() { @@ -163,13 +168,9 @@ public class FileAccessTreeTests extends ESTestCase { } public void testTempDirAccess() { - Path tempDir = createTempDir(); - var tree = FileAccessTree.of( - FilesEntitlement.EMPTY, - new PathLookup(Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1"), Path.of("/data2") }, tempDir) - ); - assertThat(tree.canRead(tempDir), is(true)); - assertThat(tree.canWrite(tempDir), is(true)); + var tree = FileAccessTree.of(FilesEntitlement.EMPTY, TEST_PATH_LOOKUP); + assertThat(tree.canRead(TEST_PATH_LOOKUP.tempDir()), is(true)); + assertThat(tree.canWrite(TEST_PATH_LOOKUP.tempDir()), is(true)); } FileAccessTree accessTree(FilesEntitlement entitlement) { diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index d63a9f4bcadf..2b5700d03e71 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.entitlement.runtime.policy.PolicyManager.ModuleEntitlements; import org.elasticsearch.entitlement.runtime.policy.agent.TestAgent; import org.elasticsearch.entitlement.runtime.policy.agent.inner.TestInnerAgent; @@ -68,7 +69,9 @@ public class PolicyManagerTests extends ESTestCase { 
TEST_BASE_DIR.resolve("/user/home"), TEST_BASE_DIR.resolve("/config"), new Path[] { TEST_BASE_DIR.resolve("/data1/"), TEST_BASE_DIR.resolve("/data2") }, - TEST_BASE_DIR.resolve("/temp") + TEST_BASE_DIR.resolve("/temp"), + Settings.EMPTY::get, + Settings.EMPTY::getGlobValues ); } catch (Exception e) { throw new IllegalStateException(e); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index f64fe33158dd..5bf582b894c3 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -74,7 +74,8 @@ public class PolicyParserFailureTests extends ESTestCase { """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [files]: a files entitlement entry cannot contain both 'path' and 'relative_path'", + + "for entitlement type [files]: a files entitlement entry must contain one of " + + "[path, relative_path, path_setting, relative_path_setting]", ppe.getMessage() ); } @@ -87,7 +88,8 @@ public class PolicyParserFailureTests extends ESTestCase { """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " - + "for entitlement type [files]: files entitlement must contain either 'path' or 'relative_path' for every entry", + + "for entitlement type [files]: a files entitlement entry must contain one of " + + "[path, relative_path, path_setting, relative_path_setting]", ppe.getMessage() ); } diff --git 
a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 9af743f81715..45c523c58b7e 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -182,6 +182,11 @@ public class PolicyParserTests extends ESTestCase { mode: "read" - path: '%s' mode: "read_write" + - path_setting: foo.bar + mode: read + - relative_path_setting: foo.bar + relative_to: config + mode: read """, relativePathToFile, relativePathToDir, TEST_ABSOLUTE_PATH_TO_FILE).getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false @@ -196,7 +201,9 @@ public class PolicyParserTests extends ESTestCase { List.of( Map.of("relative_path", relativePathToFile, "mode", "read_write", "relative_to", "data"), Map.of("relative_path", relativePathToDir, "mode", "read", "relative_to", "config"), - Map.of("path", TEST_ABSOLUTE_PATH_TO_FILE, "mode", "read_write") + Map.of("path", TEST_ABSOLUTE_PATH_TO_FILE, "mode", "read_write"), + Map.of("path_setting", "foo.bar", "mode", "read"), + Map.of("relative_path_setting", "foo.bar", "relative_to", "config", "mode", "read") ) ) ) diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java index 511299ba73c6..b871e8ccf3e6 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/entitlements/FilesEntitlementTests.java @@ -9,20 +9,42 @@ package org.elasticsearch.entitlement.runtime.policy.entitlements; +import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.entitlement.runtime.policy.PathLookup; import org.elasticsearch.entitlement.runtime.policy.PolicyValidationException; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.FileData; import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; import java.nio.file.Path; import java.util.List; import java.util.Map; +import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ; import static org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode.READ_WRITE; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.is; public class FilesEntitlementTests extends ESTestCase { + static Settings settings; + + @BeforeClass + public static void setupRoot() { + settings = Settings.EMPTY; + } + + private static final PathLookup TEST_PATH_LOOKUP = new PathLookup( + Path.of("home"), + Path.of("/config"), + new Path[] { Path.of("/data1"), Path.of("/data2") }, + Path.of("/tmp"), + setting -> settings.get(setting), + glob -> settings.getGlobValues(glob) + ); + public void testEmptyBuild() { PolicyValidationException pve = expectThrows(PolicyValidationException.class, () -> FilesEntitlement.build(List.of())); assertEquals("must specify at least one path", pve.getMessage()); @@ -39,10 +61,30 @@ public class FilesEntitlementTests extends ESTestCase { } public void testFileDataRelativeWithEmptyDirectory() { - var fileData = FilesEntitlement.FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE); - var dataDirs = fileData.resolvePaths( - new PathLookup(Path.of("/home"), Path.of("/config"), new Path[] { Path.of("/data1/"), Path.of("/data2") }, Path.of("/temp")) - ); + var fileData = FileData.ofRelativePath(Path.of(""), FilesEntitlement.BaseDir.DATA, READ_WRITE); + var 
dataDirs = fileData.resolvePaths(TEST_PATH_LOOKUP); assertThat(dataDirs.toList(), contains(Path.of("/data1/"), Path.of("/data2"))); } + + public void testPathSettingResolve() { + var entitlement = FilesEntitlement.build(List.of(Map.of("path_setting", "foo.bar", "mode", "read"))); + var filesData = entitlement.filesData(); + assertThat(filesData, contains(FileData.ofPathSetting("foo.bar", READ))); + + var fileData = FileData.ofPathSetting("foo.bar", READ); + // empty settings + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), empty()); + + fileData = FileData.ofPathSetting("foo.bar", READ); + settings = Settings.builder().put("foo.bar", "/setting/path").build(); + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/setting/path"))); + + fileData = FileData.ofPathSetting("foo.*.bar", READ); + settings = Settings.builder().put("foo.baz.bar", "/setting/path").build(); + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), contains(Path.of("/setting/path"))); + + fileData = FileData.ofPathSetting("foo.*.bar", READ); + settings = Settings.builder().put("foo.baz.bar", "/setting/path").put("foo.baz2.bar", "/other/path").build(); + assertThat(fileData.resolvePaths(TEST_PATH_LOOKUP).toList(), containsInAnyOrder(Path.of("/setting/path"), Path.of("/other/path"))); + } } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 6e07c7012cc0..429d2584b0fa 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -245,10 +245,12 @@ class Elasticsearch { EntitlementBootstrap.bootstrap( pluginPolicies, pluginsResolver::resolveClassToPluginName, + nodeEnv.settings()::get, + nodeEnv.settings()::getGlobValues, nodeEnv.dataDirs(), nodeEnv.configDir(), - nodeEnv.tmpDir(), - nodeEnv.logsDir() + nodeEnv.logsDir(), + nodeEnv.tmpDir() ); } else { 
assert RuntimeVersionFeature.isSecurityManagerAvailable(); diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index 09773ba1de35..05d4212de0b0 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -290,6 +290,24 @@ public final class Settings implements ToXContentFragment, Writeable, Diffable getGlobValues(String settingGlob) { + int globIndex = settingGlob.indexOf(".*."); + if (globIndex == -1) { + throw new IllegalArgumentException("Pattern [" + settingGlob + "] does not contain a glob [*]"); + } + String prefix = settingGlob.substring(0, globIndex + 1); + String suffix = settingGlob.substring(globIndex + 2); + Settings subSettings = getByPrefix(prefix); + return subSettings.names().stream().map(k -> k + suffix).map(subSettings::get).filter(Objects::nonNull); + } + /** * Returns the setting value (as float) associated with the setting key. If it does not exists, * returns the default value provided. 
diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index d0f17f6a495d..0d5dbdec8360 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -703,4 +703,10 @@ public class SettingsTests extends ESTestCase { {"ant.bee":{"cat.dog":{"ewe":"value3"},"cat":"value2"},"ant":"value1"}""", Strings.toString(builder)); } + public void testGlobValues() throws IOException { + Settings test = Settings.builder().put("foo.x.bar", "1").put("foo.y.bar", "2").build(); + var values = test.getGlobValues("foo.*.bar").toList(); + assertThat(values, containsInAnyOrder("1", "2")); + } + } From 5160113880d5eb619706edf8671131319ac9beb2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 19 Feb 2025 10:08:38 +1100 Subject: [PATCH 29/29] Mute org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT test {yaml=reference/cat/allocation/cat-allocation-example} #121976 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4e51e313f39f..44995b2b99f9 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -323,6 +323,9 @@ tests: - class: org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT method: testRestoreShrinkIndex issue: https://github.com/elastic/elasticsearch/issues/121717 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + method: test {yaml=reference/cat/allocation/cat-allocation-example} + issue: https://github.com/elastic/elasticsearch/issues/121976 # Examples: #