From 1292580c0333a4f7402cb0d133f2a991a5302d1b Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 7 Oct 2024 14:52:42 +0200 Subject: [PATCH 01/85] [DOCS] Lookup runtime fields are now GA (#114221) --- docs/reference/mapping/runtime.asciidoc | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/reference/mapping/runtime.asciidoc b/docs/reference/mapping/runtime.asciidoc index 190081fa801b..1ee119427906 100644 --- a/docs/reference/mapping/runtime.asciidoc +++ b/docs/reference/mapping/runtime.asciidoc @@ -821,8 +821,6 @@ address. [[lookup-runtime-fields]] ==== Retrieve fields from related indices -experimental[] - The <> parameter on the `_search` API can also be used to retrieve fields from the related indices via runtime fields with a type of `lookup`. From dcbbbabd3d1b78bd4658ab69e85e7c55bcc8fced Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 8 Oct 2024 00:10:38 +1100 Subject: [PATCH 02/85] Mute org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests testInfer_StreamRequest #114232 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 5754e6f6f415..7764c0f8865d 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -377,6 +377,9 @@ tests: - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 +- class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests + method: testInfer_StreamRequest + issue: https://github.com/elastic/elasticsearch/issues/114232 # Examples: # From 9368bbeb193b1d2e7f33af90909311c86fc9a948 Mon Sep 17 00:00:00 2001 From: Larry Gregory Date: Mon, 7 Oct 2024 09:29:58 -0400 Subject: [PATCH 03/85] Adds manage_inference cluster privilege to kibana_system role (#114051) * Adds manage_inference cluster privilege to kibana_system role * Fix * 
this is what I get for not using a real IDE * Remove whitespace --------- Co-authored-by: Elastic Machine --- .../authz/store/KibanaOwnedReservedRoleDescriptors.java | 2 ++ .../core/security/authz/store/ReservedRolesStoreTests.java | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 91a8ea4d368f..6c28c6f3053a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -65,6 +65,8 @@ class KibanaOwnedReservedRoleDescriptors { new String[] { "monitor", "manage_index_templates", + // manage_inference required for Kibana's inference plugin to setup an ELSER endpoint. 
+ "manage_inference", MonitoringBulkAction.NAME, "manage_saml", "manage_token", diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index acf530fb7c5c..26b306d6f133 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -436,6 +436,11 @@ public class ReservedRolesStoreTests extends ESTestCase { assertThat(kibanaRole.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(false)); assertThat(kibanaRole.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); + // Inference + assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/get", request, authentication)); + assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/put", request, authentication)); + assertTrue(kibanaRole.cluster().check("cluster:admin/xpack/inference/delete", request, authentication)); + // Enrich assertThat(kibanaRole.cluster().check("cluster:admin/xpack/enrich/put", request, authentication), is(true)); assertThat(kibanaRole.cluster().check("cluster:admin/xpack/enrich/execute", request, authentication), is(true)); From fe36a4543d05fff2a74e70a6cfcdb9dacdea5438 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Mon, 7 Oct 2024 07:34:05 -0600 Subject: [PATCH 04/85] Make randomInstantBetween return in range [minInstant, maxInstant] (#114177) randomInstantBetween can produce a result which is not within the [minInstant, maxInstant] range. This occurs when the epoch second picked matches the min bound and the nanos are below the min nanos, or the second picked matches the max bound seconds and nanos are above the max bound nanos. 
This change fixes the function by setting a bound on which nano values can be picked if the min or max epoch second value is picked. --- docs/changelog/114177.yaml | 5 +++++ .../src/main/java/org/elasticsearch/test/ESTestCase.java | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/114177.yaml diff --git a/docs/changelog/114177.yaml b/docs/changelog/114177.yaml new file mode 100644 index 000000000000..d68486469d79 --- /dev/null +++ b/docs/changelog/114177.yaml @@ -0,0 +1,5 @@ +pr: 114177 +summary: "Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]`" +area: Infra/Metrics +type: bug +issues: [] diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 068a666d78d7..31c8e5bc3d45 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -900,10 +900,11 @@ public abstract class ESTestCase extends LuceneTestCase { * @return a random instant between a min and a max value with a random nanosecond precision */ public static Instant randomInstantBetween(Instant minInstant, Instant maxInstant) { - return Instant.ofEpochSecond( - randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond()), - randomLongBetween(0, 999999999) - ); + long epochSecond = randomLongBetween(minInstant.getEpochSecond(), maxInstant.getEpochSecond()); + long minNanos = epochSecond == minInstant.getEpochSecond() ? minInstant.getNano() : 0; + long maxNanos = epochSecond == maxInstant.getEpochSecond() ? 
maxInstant.getNano() : 999999999; + long nanos = randomLongBetween(minNanos, maxNanos); + return Instant.ofEpochSecond(epochSecond, nanos); } /** From e1bba9b390ada7510d52e9c9460843884df42961 Mon Sep 17 00:00:00 2001 From: moxarth-elastic <96762084+moxarth-elastic@users.noreply.github.com> Date: Mon, 7 Oct 2024 19:18:32 +0530 Subject: [PATCH 05/85] [Zoom] Update existing scopes with granular scopes (#113994) --- .../connector/docs/connectors-zoom.asciidoc | 40 +++++++++++-------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/docs/reference/connector/docs/connectors-zoom.asciidoc b/docs/reference/connector/docs/connectors-zoom.asciidoc index d01b9c2be036..d945a0aec3da 100644 --- a/docs/reference/connector/docs/connectors-zoom.asciidoc +++ b/docs/reference/connector/docs/connectors-zoom.asciidoc @@ -63,18 +63,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s 6. Click on the "Create" button to create the app registration. 7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later. 8. Navigate to the "Scopes" section and click on the "Add Scopes" button. -9. The following scopes need to be added to the app. +9. The following granular scopes need to be added to the app. + [source,bash] ---- -user:read:admin -meeting:read:admin -chat_channel:read:admin -recording:read:admin -chat_message:read:admin -report:read:admin +user:read:list_users:admin +meeting:read:list_meetings:admin +meeting:read:list_past_participants:admin +cloud_recording:read:list_user_recordings:admin +team_chat:read:list_user_channels:admin +team_chat:read:list_user_messages:admin ---- - +[NOTE] +==== +The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch. +==== ++ 10. Click on the "Done" button to add the selected scopes to your app. 11. 
Navigate to the "Activation" section and input the necessary information to activate the app. @@ -220,18 +224,22 @@ To connect to Zoom you need to https://developers.zoom.us/docs/internal-apps/s2s 6. Click on the "Create" button to create the app registration. 7. After the registration is complete, you will be redirected to the app's overview page. Take note of the "App Credentials" value, as you'll need it later. 8. Navigate to the "Scopes" section and click on the "Add Scopes" button. -9. The following scopes need to be added to the app. +9. The following granular scopes need to be added to the app. + [source,bash] ---- -user:read:admin -meeting:read:admin -chat_channel:read:admin -recording:read:admin -chat_message:read:admin -report:read:admin +user:read:list_users:admin +meeting:read:list_meetings:admin +meeting:read:list_past_participants:admin +cloud_recording:read:list_user_recordings:admin +team_chat:read:list_user_channels:admin +team_chat:read:list_user_messages:admin ---- - +[NOTE] +==== +The connector requires a minimum scope of `user:read:list_users:admin` to ingest data into Elasticsearch. +==== ++ 10. Click on the "Done" button to add the selected scopes to your app. 11. Navigate to the "Activation" section and input the necessary information to activate the app. From 7decd52132fbdf8e2e7a8b093255434e63e77a0c Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Mon, 7 Oct 2024 15:08:23 +0100 Subject: [PATCH 06/85] Allow incubating Panama Vector in simdvec, and add vectorized ipByteBin (#112933) Add support for vectorized ipByteBin. The structure of the implementation and loading framework mirror that of Lucene, but is simplified by avoiding reflective loading since ES has support for a MRJar section for 21. For now, we just disable warnings-as-errors in this small sourceset, since -Xlint:-incubating is only support since JDK 22. The number of source files is small here. 
Will investigate how to assert that just the single incubating warning is emitted by javac, at a later point. --- docs/changelog/112933.yaml | 5 + libs/simdvec/build.gradle | 13 ++ libs/simdvec/src/main/java/module-info.java | 1 + .../elasticsearch/simdvec/ESVectorUtil.java | 27 ++++ .../DefaultESVectorUtilSupport.java | 39 +++++ .../DefaultESVectorizationProvider.java | 23 +++ .../vectorization/ESVectorUtilSupport.java | 17 ++ .../ESVectorizationProvider.java | 38 +++++ .../ESVectorizationProvider.java | 87 ++++++++++ .../PanamaESVectorUtilSupport.java | 153 ++++++++++++++++++ .../PanamaESVectorizationProvider.java | 24 +++ .../simdvec/ESVectorUtilTests.java | 130 +++++++++++++++ .../vectorization/BaseVectorizationTests.java | 29 ++++ 13 files changed, 586 insertions(+) create mode 100644 docs/changelog/112933.yaml create mode 100644 libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java create mode 100644 libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java create mode 100644 libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java create mode 100644 libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java create mode 100644 libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java create mode 100644 libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java create mode 100644 libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java create mode 100644 libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java create mode 100644 libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java create mode 100644 
libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java diff --git a/docs/changelog/112933.yaml b/docs/changelog/112933.yaml new file mode 100644 index 000000000000..222cd5aadf73 --- /dev/null +++ b/docs/changelog/112933.yaml @@ -0,0 +1,5 @@ +pr: 112933 +summary: "Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin`" +area: Search +type: enhancement +issues: [] diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index 5a523a19d4b6..8b676a15038c 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -23,6 +23,19 @@ dependencies { } } +tasks.named("compileMain21Java").configure { + options.compilerArgs << '--add-modules=jdk.incubator.vector' + // we remove Werror, since incubating suppression (-Xlint:-incubating) + // is only support since JDK 22 + options.compilerArgs -= '-Werror' +} + +test { + if (JavaVersion.current().majorVersion.toInteger() >= 21) { + jvmArgs '--add-modules=jdk.incubator.vector' + } +} + tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/simdvec/src/main/java/module-info.java b/libs/simdvec/src/main/java/module-info.java index 64e685ba3cbb..44f6e39d5dba 100644 --- a/libs/simdvec/src/main/java/module-info.java +++ b/libs/simdvec/src/main/java/module-info.java @@ -10,6 +10,7 @@ module org.elasticsearch.simdvec { requires org.elasticsearch.nativeaccess; requires org.apache.lucene.core; + requires org.elasticsearch.logging; exports org.elasticsearch.simdvec to org.elasticsearch.server; } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java new file mode 100644 index 000000000000..91193d5fa6ea --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/ESVectorUtil.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec; + +import org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport; +import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider; + +import static org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport.B_QUERY; + +public class ESVectorUtil { + + private static final ESVectorUtilSupport IMPL = ESVectorizationProvider.getInstance().getVectorUtilSupport(); + + public static long ipByteBinByte(byte[] q, byte[] d) { + if (q.length != d.length * B_QUERY) { + throw new IllegalArgumentException("vector dimensions incompatible: " + q.length + "!= " + B_QUERY + " x " + d.length); + } + return IMPL.ipByteBinByte(q, d); + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java new file mode 100644 index 000000000000..4a08096119d6 --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorUtilSupport.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.apache.lucene.util.BitUtil; + +final class DefaultESVectorUtilSupport implements ESVectorUtilSupport { + + DefaultESVectorUtilSupport() {} + + @Override + public long ipByteBinByte(byte[] q, byte[] d) { + return ipByteBinByteImpl(q, d); + } + + public static long ipByteBinByteImpl(byte[] q, byte[] d) { + long ret = 0; + int size = d.length; + for (int i = 0; i < B_QUERY; i++) { + int r = 0; + long subRet = 0; + for (final int upperBound = d.length & -Integer.BYTES; r < upperBound; r += Integer.BYTES) { + subRet += Integer.bitCount((int) BitUtil.VH_NATIVE_INT.get(q, i * size + r) & (int) BitUtil.VH_NATIVE_INT.get(d, r)); + } + for (; r < d.length; r++) { + subRet += Integer.bitCount((q[i * size + r] & d[r]) & 0xFF); + } + ret += subRet << i; + } + return ret; + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java new file mode 100644 index 000000000000..6c0f7ed146b8 --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/DefaultESVectorizationProvider.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +final class DefaultESVectorizationProvider extends ESVectorizationProvider { + private final ESVectorUtilSupport vectorUtilSupport; + + DefaultESVectorizationProvider() { + vectorUtilSupport = new DefaultESVectorUtilSupport(); + } + + @Override + public ESVectorUtilSupport getVectorUtilSupport() { + return vectorUtilSupport; + } +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java new file mode 100644 index 000000000000..d7611173ca69 --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorUtilSupport.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec.internal.vectorization; + +public interface ESVectorUtilSupport { + + short B_QUERY = 4; + + long ipByteBinByte(byte[] q, byte[] d); +} diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java new file mode 100644 index 000000000000..e541c10e145b --- /dev/null +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import java.util.Objects; + +public abstract class ESVectorizationProvider { + + public static ESVectorizationProvider getInstance() { + return Objects.requireNonNull( + ESVectorizationProvider.Holder.INSTANCE, + "call to getInstance() from subclass of VectorizationProvider" + ); + } + + ESVectorizationProvider() {} + + public abstract ESVectorUtilSupport getVectorUtilSupport(); + + // visible for tests + static ESVectorizationProvider lookup(boolean testMode) { + return new DefaultESVectorizationProvider(); + } + + /** This static holder class prevents classloading deadlock. 
*/ + private static final class Holder { + private Holder() {} + + static final ESVectorizationProvider INSTANCE = lookup(false); + } +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java new file mode 100644 index 000000000000..5b7aab7ddfa4 --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/ESVectorizationProvider.java @@ -0,0 +1,87 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.apache.lucene.util.Constants; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.util.Locale; +import java.util.Objects; +import java.util.Optional; + +public abstract class ESVectorizationProvider { + + protected static final Logger logger = LogManager.getLogger(ESVectorizationProvider.class); + + public static ESVectorizationProvider getInstance() { + return Objects.requireNonNull( + ESVectorizationProvider.Holder.INSTANCE, + "call to getInstance() from subclass of VectorizationProvider" + ); + } + + ESVectorizationProvider() {} + + public abstract ESVectorUtilSupport getVectorUtilSupport(); + + // visible for tests + static ESVectorizationProvider lookup(boolean testMode) { + final int runtimeVersion = Runtime.version().feature(); + assert runtimeVersion >= 21; + if (runtimeVersion <= 23) { + // only use vector module with Hotspot VM + if (Constants.IS_HOTSPOT_VM == false) { + logger.warn("Java runtime is not using Hotspot VM; Java vector incubator API can't be enabled."); + return new DefaultESVectorizationProvider(); + } + // is the incubator module present and readable (JVM providers may to exclude them or it is + // build with jlink) + final var vectorMod = lookupVectorModule(); + if (vectorMod.isEmpty()) { + logger.warn( + "Java vector incubator module is not readable. " + + "For optimal vector performance, pass '--add-modules jdk.incubator.vector' to enable Vector API." + ); + return new DefaultESVectorizationProvider(); + } + vectorMod.ifPresent(ESVectorizationProvider.class.getModule()::addReads); + var impl = new PanamaESVectorizationProvider(); + logger.info( + String.format( + Locale.ENGLISH, + "Java vector incubator API enabled; uses preferredBitSize=%d", + PanamaESVectorUtilSupport.VECTOR_BITSIZE + ) + ); + return impl; + } else { + logger.warn( + "You are running with unsupported Java " + + runtimeVersion + + ". 
To make full use of the Vector API, please update Elasticsearch." + ); + } + return new DefaultESVectorizationProvider(); + } + + private static Optional lookupVectorModule() { + return Optional.ofNullable(ESVectorizationProvider.class.getModule().getLayer()) + .orElse(ModuleLayer.boot()) + .findModule("jdk.incubator.vector"); + } + + /** This static holder class prevents classloading deadlock. */ + private static final class Holder { + private Holder() {} + + static final ESVectorizationProvider INSTANCE = lookup(false); + } +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java new file mode 100644 index 000000000000..0e5827d04673 --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorUtilSupport.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import jdk.incubator.vector.ByteVector; +import jdk.incubator.vector.IntVector; +import jdk.incubator.vector.LongVector; +import jdk.incubator.vector.VectorOperators; +import jdk.incubator.vector.VectorShape; +import jdk.incubator.vector.VectorSpecies; + +import org.apache.lucene.util.Constants; + +public final class PanamaESVectorUtilSupport implements ESVectorUtilSupport { + + static final int VECTOR_BITSIZE; + + /** Whether integer vectors can be trusted to actually be fast. 
*/ + static final boolean HAS_FAST_INTEGER_VECTORS; + + static { + // default to platform supported bitsize + VECTOR_BITSIZE = VectorShape.preferredShape().vectorBitSize(); + + // hotspot misses some SSE intrinsics, workaround it + // to be fair, they do document this thing only works well with AVX2/AVX3 and Neon + boolean isAMD64withoutAVX2 = Constants.OS_ARCH.equals("amd64") && VECTOR_BITSIZE < 256; + HAS_FAST_INTEGER_VECTORS = isAMD64withoutAVX2 == false; + } + + @Override + public long ipByteBinByte(byte[] q, byte[] d) { + // 128 / 8 == 16 + if (d.length >= 16 && HAS_FAST_INTEGER_VECTORS) { + if (VECTOR_BITSIZE >= 256) { + return ipByteBin256(q, d); + } else if (VECTOR_BITSIZE == 128) { + return ipByteBin128(q, d); + } + } + return DefaultESVectorUtilSupport.ipByteBinByteImpl(q, d); + } + + private static final VectorSpecies BYTE_SPECIES_128 = ByteVector.SPECIES_128; + private static final VectorSpecies BYTE_SPECIES_256 = ByteVector.SPECIES_256; + + static long ipByteBin256(byte[] q, byte[] d) { + long subRet0 = 0; + long subRet1 = 0; + long subRet2 = 0; + long subRet3 = 0; + int i = 0; + + if (d.length >= ByteVector.SPECIES_256.vectorByteSize() * 2) { + int limit = ByteVector.SPECIES_256.loopBound(d.length); + var sum0 = LongVector.zero(LongVector.SPECIES_256); + var sum1 = LongVector.zero(LongVector.SPECIES_256); + var sum2 = LongVector.zero(LongVector.SPECIES_256); + var sum3 = LongVector.zero(LongVector.SPECIES_256); + for (; i < limit; i += ByteVector.SPECIES_256.length()) { + var vq0 = ByteVector.fromArray(BYTE_SPECIES_256, q, i).reinterpretAsLongs(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length).reinterpretAsLongs(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length * 2).reinterpretAsLongs(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + d.length * 3).reinterpretAsLongs(); + var vd = ByteVector.fromArray(BYTE_SPECIES_256, d, i).reinterpretAsLongs(); + sum0 = 
sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += sum3.reduceLanes(VectorOperators.ADD); + } + + if (d.length - i >= ByteVector.SPECIES_128.vectorByteSize()) { + var sum0 = LongVector.zero(LongVector.SPECIES_128); + var sum1 = LongVector.zero(LongVector.SPECIES_128); + var sum2 = LongVector.zero(LongVector.SPECIES_128); + var sum3 = LongVector.zero(LongVector.SPECIES_128); + int limit = ByteVector.SPECIES_128.loopBound(d.length); + for (; i < limit; i += ByteVector.SPECIES_128.length()) { + var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsLongs(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length).reinterpretAsLongs(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 2).reinterpretAsLongs(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 3).reinterpretAsLongs(); + var vd = ByteVector.fromArray(BYTE_SPECIES_128, d, i).reinterpretAsLongs(); + sum0 = sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += sum3.reduceLanes(VectorOperators.ADD); + } + // tail as bytes + for (; i < d.length; i++) { + subRet0 += Integer.bitCount((q[i] & d[i]) & 0xFF); + subRet1 += Integer.bitCount((q[i + d.length] & d[i]) & 0xFF); + subRet2 += 
Integer.bitCount((q[i + 2 * d.length] & d[i]) & 0xFF); + subRet3 += Integer.bitCount((q[i + 3 * d.length] & d[i]) & 0xFF); + } + return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3); + } + + public static long ipByteBin128(byte[] q, byte[] d) { + long subRet0 = 0; + long subRet1 = 0; + long subRet2 = 0; + long subRet3 = 0; + int i = 0; + + var sum0 = IntVector.zero(IntVector.SPECIES_128); + var sum1 = IntVector.zero(IntVector.SPECIES_128); + var sum2 = IntVector.zero(IntVector.SPECIES_128); + var sum3 = IntVector.zero(IntVector.SPECIES_128); + int limit = ByteVector.SPECIES_128.loopBound(d.length); + for (; i < limit; i += ByteVector.SPECIES_128.length()) { + var vd = ByteVector.fromArray(BYTE_SPECIES_128, d, i).reinterpretAsInts(); + var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsInts(); + var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length).reinterpretAsInts(); + var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 2).reinterpretAsInts(); + var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + d.length * 3).reinterpretAsInts(); + sum0 = sum0.add(vd.and(vq0).lanewise(VectorOperators.BIT_COUNT)); + sum1 = sum1.add(vd.and(vq1).lanewise(VectorOperators.BIT_COUNT)); + sum2 = sum2.add(vd.and(vq2).lanewise(VectorOperators.BIT_COUNT)); + sum3 = sum3.add(vd.and(vq3).lanewise(VectorOperators.BIT_COUNT)); + } + subRet0 += sum0.reduceLanes(VectorOperators.ADD); + subRet1 += sum1.reduceLanes(VectorOperators.ADD); + subRet2 += sum2.reduceLanes(VectorOperators.ADD); + subRet3 += sum3.reduceLanes(VectorOperators.ADD); + // tail as bytes + for (; i < d.length; i++) { + int dValue = d[i]; + subRet0 += Integer.bitCount((dValue & q[i]) & 0xFF); + subRet1 += Integer.bitCount((dValue & q[i + d.length]) & 0xFF); + subRet2 += Integer.bitCount((dValue & q[i + 2 * d.length]) & 0xFF); + subRet3 += Integer.bitCount((dValue & q[i + 3 * d.length]) & 0xFF); + } + return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3); + 
} +} diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java new file mode 100644 index 000000000000..62d25d79487e --- /dev/null +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/PanamaESVectorizationProvider.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec.internal.vectorization; + +final class PanamaESVectorizationProvider extends ESVectorizationProvider { + + private final ESVectorUtilSupport vectorUtilSupport; + + PanamaESVectorizationProvider() { + vectorUtilSupport = new PanamaESVectorUtilSupport(); + } + + @Override + public ESVectorUtilSupport getVectorUtilSupport() { + return vectorUtilSupport; + } +} diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java new file mode 100644 index 000000000000..0dbc41c0c105 --- /dev/null +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/ESVectorUtilTests.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.simdvec; + +import org.elasticsearch.simdvec.internal.vectorization.BaseVectorizationTests; +import org.elasticsearch.simdvec.internal.vectorization.ESVectorizationProvider; + +import java.util.Arrays; + +import static org.elasticsearch.simdvec.internal.vectorization.ESVectorUtilSupport.B_QUERY; + +public class ESVectorUtilTests extends BaseVectorizationTests { + + static final ESVectorizationProvider defaultedProvider = BaseVectorizationTests.defaultProvider(); + static final ESVectorizationProvider defOrPanamaProvider = BaseVectorizationTests.maybePanamaProvider(); + + public void testIpByteBinInvariants() { + int iterations = atLeast(10); + for (int i = 0; i < iterations; i++) { + int size = randomIntBetween(1, 10); + var d = new byte[size]; + var q = new byte[size * B_QUERY - 1]; + expectThrows(IllegalArgumentException.class, () -> ESVectorUtil.ipByteBinByte(q, d)); + } + } + + public void testBasicIpByteBin() { + testBasicIpByteBinImpl(ESVectorUtil::ipByteBinByte); + testBasicIpByteBinImpl(defaultedProvider.getVectorUtilSupport()::ipByteBinByte); + testBasicIpByteBinImpl(defOrPanamaProvider.getVectorUtilSupport()::ipByteBinByte); + } + + interface IpByteBin { + long apply(byte[] q, byte[] d); + } + + void testBasicIpByteBinImpl(IpByteBin ipByteBinFunc) { + assertEquals(15L, ipByteBinFunc.apply(new byte[] { 1, 1, 1, 1 }, new byte[] { 1 })); + assertEquals(30L, ipByteBinFunc.apply(new byte[] { 1, 2, 1, 2, 1, 2, 1, 2 }, new byte[] { 1, 2 })); + + var d = new byte[] { 1, 2, 3 }; + var q = new byte[] { 1, 2, 3, 1, 2, 3, 1, 2, 3, 1, 2, 3 }; + assert scalarIpByteBin(q, d) == 60L; // 4 + 8 + 16 + 32 
+ assertEquals(60L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4 }; + q = new byte[] { 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4 }; + assert scalarIpByteBin(q, d) == 75L; // 5 + 10 + 20 + 40 + assertEquals(75L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5 }; + q = new byte[] { 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5, 1, 2, 3, 4, 5 }; + assert scalarIpByteBin(q, d) == 105L; // 7 + 14 + 28 + 56 + assertEquals(105L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5, 6 }; + assert scalarIpByteBin(q, d) == 135L; // 9 + 18 + 36 + 72 + assertEquals(135L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7, 1, 2, 3, 4, 5, 6, 7 }; + assert scalarIpByteBin(q, d) == 180L; // 12 + 24 + 48 + 96 + assertEquals(180L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 }; + assert scalarIpByteBin(q, d) == 195L; // 13 + 26 + 52 + 104 + assertEquals(195L, ipByteBinFunc.apply(q, d)); + + d = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + q = new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; + assert scalarIpByteBin(q, d) == 225L; // 15 + 30 + 60 + 120 + assertEquals(225L, ipByteBinFunc.apply(q, d)); + } + + public void testIpByteBin() { + testIpByteBinImpl(ESVectorUtil::ipByteBinByte); + testIpByteBinImpl(defaultedProvider.getVectorUtilSupport()::ipByteBinByte); + testIpByteBinImpl(defOrPanamaProvider.getVectorUtilSupport()::ipByteBinByte); + } + + void testIpByteBinImpl(IpByteBin ipByteBinFunc) { + int iterations = atLeast(50); + for (int i = 0; i < iterations; i++) { + int size = random().nextInt(5000); + var d = new byte[size]; + var 
q = new byte[size * B_QUERY]; + random().nextBytes(d); + random().nextBytes(q); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + + Arrays.fill(d, Byte.MAX_VALUE); + Arrays.fill(q, Byte.MAX_VALUE); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + + Arrays.fill(d, Byte.MIN_VALUE); + Arrays.fill(q, Byte.MIN_VALUE); + assertEquals(scalarIpByteBin(q, d), ipByteBinFunc.apply(q, d)); + } + } + + static int scalarIpByteBin(byte[] q, byte[] d) { + int res = 0; + for (int i = 0; i < B_QUERY; i++) { + res += (popcount(q, i * d.length, d, d.length) << i); + } + return res; + } + + public static int popcount(byte[] a, int aOffset, byte[] b, int length) { + int res = 0; + for (int j = 0; j < length; j++) { + int value = (a[aOffset + j] & b[j]) & 0xFF; + for (int k = 0; k < Byte.SIZE; k++) { + if ((value & (1 << k)) != 0) { + ++res; + } + } + } + return res; + } +} diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java new file mode 100644 index 000000000000..f2bc8a11b04a --- /dev/null +++ b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/internal/vectorization/BaseVectorizationTests.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.simdvec.internal.vectorization; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +public class BaseVectorizationTests extends ESTestCase { + + @Before + public void sanity() { + assert Runtime.version().feature() < 21 || ModuleLayer.boot().findModule("jdk.incubator.vector").isPresent(); + } + + public static ESVectorizationProvider defaultProvider() { + return new DefaultESVectorizationProvider(); + } + + public static ESVectorizationProvider maybePanamaProvider() { + return ESVectorizationProvider.lookup(true); + } +} From 58cc37922c159714c5c384a44cdf444686021e51 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Mon, 7 Oct 2024 16:23:54 +0200 Subject: [PATCH 07/85] Improve performance of LongLongHash#removeAndAdd (#114230) remove some unnecessary manipulation of the keys in the method removeAndAdd. --- .../org/elasticsearch/common/util/LongLongHash.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java index 4a35cdaa9ab9..f7708af59dde 100644 --- a/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java +++ b/server/src/main/java/org/elasticsearch/common/util/LongLongHash.java @@ -104,13 +104,16 @@ public final class LongLongHash extends AbstractHash { keys.set(keyOffset + 1, key2); } - private void reset(long key1, long key2, long id) { + private void reset(long id) { + final LongArray keys = this.keys; + final long keyOffset = id * 2; + final long key1 = keys.get(keyOffset); + final long key2 = keys.get(keyOffset + 1); final long slot = slot(hash(key1, key2), mask); for (long index = slot;; index = nextSlot(index, mask)) { final long curId = id(index); if (curId == -1) { // means unset setId(index, id); - append(id, key1, key2); break; } } @@ -134,10 +137,7 @@ public final class LongLongHash extends AbstractHash { protected 
void removeAndAdd(long index) { final long id = getAndSetId(index, -1); assert id >= 0; - long keyOffset = id * 2; - final long key1 = keys.getAndSet(keyOffset, 0); - final long key2 = keys.getAndSet(keyOffset + 1, 0); - reset(key1, key2, id); + reset(id); } @Override From 67f1b02d56d8ad77921059dc34a9c4d5d945e471 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 7 Oct 2024 15:50:02 +0100 Subject: [PATCH 08/85] Add an UpdateForV9/10 to reroute cluster state (#114213) --- .../action/admin/cluster/reroute/ClusterRerouteResponse.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 9581279201be..4aa6ed60afe4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.UpdateForV10; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; @@ -43,6 +45,8 @@ public class ClusterRerouteResponse extends ActionResponse implements IsAcknowle /** * To be removed when REST compatibility with {@link org.elasticsearch.Version#V_8_6_0} / {@link RestApiVersion#V_8} no longer needed */ + @UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // to remove from the v9 API only + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // to remove entirely private final ClusterState state; private final RoutingExplanations explanations; 
private final boolean acknowledged; From a83046a85ad1e35de2dda40ec6f9123923753ef5 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 7 Oct 2024 12:05:09 -0400 Subject: [PATCH 09/85] Fixing span gap builder tests (#114218) (#114219) With #113251 having a SpanMatchNoDocsQuery is a valid response to the rewrite. closes #114218 --- muted-tests.yml | 3 --- .../elasticsearch/index/query/SpanGapQueryBuilderTests.java | 5 ++++- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 7764c0f8865d..8b756adce545 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -371,9 +371,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114187 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/114194 -- class: org.elasticsearch.index.query.SpanGapQueryBuilderTests - method: testToQuery - issue: https://github.com/elastic/elasticsearch/issues/114218 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT method: testStepInfoPreservedOnAutoRetry issue: https://github.com/elastic/elasticsearch/issues/114220 diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java index cef43a635541..5adca6d562dc 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanGapQueryBuilderTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.queries.spans.SpanNearQuery; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.lucene.queries.SpanMatchNoDocsQuery; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -50,7 +51,9 @@ public class SpanGapQueryBuilderTests extends AbstractQueryTestCase Date: Mon, 7 Oct 2024 
19:06:23 +0300 Subject: [PATCH 10/85] Avoid using `dynamic:strict` with `subobjects:false` at root (#114247) --- .../DefaultMappingParametersHandler.java | 30 ++++++++++--------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index 1046e22e65ca..81bd80f46452 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -98,7 +98,22 @@ public class DefaultMappingParametersHandler implements DataSourceHandler { return new DataSourceResponse.ObjectMappingParametersGenerator(() -> { var parameters = new HashMap(); - if (request.parentSubobjects() == ObjectMapper.Subobjects.DISABLED) { + // Changing subobjects from subobjects: false is not supported, but we can f.e. go from "true" to "false". + // TODO enable subobjects: auto + // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using + // copy_to. 
+ if (ESTestCase.randomBoolean()) { + parameters.put( + "subobjects", + ESTestCase.randomValueOtherThan( + ObjectMapper.Subobjects.AUTO, + () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) + ).toString() + ); + } + + if (request.parentSubobjects() == ObjectMapper.Subobjects.DISABLED + || parameters.getOrDefault("subobjects", "true").equals("false")) { // "enabled: false" is not compatible with subobjects: false // changing "dynamic" from parent context is not compatible with subobjects: false // changing subobjects value is not compatible with subobjects: false @@ -115,19 +130,6 @@ public class DefaultMappingParametersHandler implements DataSourceHandler { if (ESTestCase.randomBoolean()) { parameters.put("enabled", ESTestCase.randomFrom("true", "false")); } - // Changing subobjects from subobjects: false is not supported, but we can f.e. go from "true" to "false". - // TODO enable subobjects: auto - // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using - // copy_to. - if (ESTestCase.randomBoolean()) { - parameters.put( - "subobjects", - ESTestCase.randomValueOtherThan( - ObjectMapper.Subobjects.AUTO, - () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) - ).toString() - ); - } if (ESTestCase.randomBoolean()) { var value = request.isRoot() ? 
ESTestCase.randomFrom("none", "arrays") : ESTestCase.randomFrom("none", "arrays", "all"); From d47ca34b16243ac34e565a1623a6ce826bf039b8 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 7 Oct 2024 18:51:15 +0200 Subject: [PATCH 11/85] Fix Gradle configuration in idea for :libs:simdvec (#114251) --- libs/simdvec/build.gradle | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index 8b676a15038c..dab5c25b3467 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -23,14 +23,15 @@ dependencies { } } -tasks.named("compileMain21Java").configure { +// compileMain21Java does not exist within idea (see MrJarPlugin) so we cannot reference directly by name +tasks.matching { it.name == "compileMain21Java" }.configureEach { options.compilerArgs << '--add-modules=jdk.incubator.vector' // we remove Werror, since incubating suppression (-Xlint:-incubating) // is only support since JDK 22 options.compilerArgs -= '-Werror' } -test { +tasks.named('test').configure { if (JavaVersion.current().majorVersion.toInteger() >= 21) { jvmArgs '--add-modules=jdk.incubator.vector' } From bafdd81d3d0d6793cb819616d675bb0f053f4b22 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 7 Oct 2024 13:02:14 -0400 Subject: [PATCH 12/85] ESQL: Reenable part of heap attack test (#114252) This reenables a test and adds more debugging to another one. We'll use this to collect more information the next time it fails. 
--- .../elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index e45ac8a9e0f7..a24bd91206ac 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -355,7 +355,6 @@ public class HeapAttackIT extends ESRestTestCase { assertMap(map, mapMatcher.entry("columns", columns).entry("values", hasSize(10_000)).entry("took", greaterThanOrEqualTo(0))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-serverless/issues/1874") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); @@ -616,14 +615,13 @@ public class HeapAttackIT extends ESRestTestCase { private void bulk(String name, String bulk) throws IOException { Request request = new Request("POST", "/" + name + "/_bulk"); - request.addParameter("filter_path", "errors"); request.setJsonEntity(bulk); request.setOptions( RequestOptions.DEFAULT.toBuilder() .setRequestConfig(RequestConfig.custom().setSocketTimeout(Math.toIntExact(TimeValue.timeValueMinutes(5).millis())).build()) ); Response response = client().performRequest(request); - assertThat(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8), equalTo("{\"errors\":false}")); + assertThat(entityAsMap(response), matchesMap().entry("errors", false).extraOk()); } private void initIndex(String name, String bulk) throws IOException { From d8cc7d3d2aa0138025ae6dd10a3929f480f527e5 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Mon, 7 Oct 2024 20:15:44 +0300 Subject: [PATCH 
13/85] Updating RRF-related test cases to work with multiple shards and/or replicas (#114189) --- muted-tests.yml | 9 - .../retriever/RankDocRetrieverBuilderIT.java | 756 ------------ .../xpack/rank/rrf/RRFRetrieverBuilderIT.java | 36 +- .../rrf/RRFRetrieverBuilderNestedDocsIT.java | 23 +- .../test/rrf/350_rrf_retriever_pagination.yml | 1019 ++++++++++------- ...rrf_retriever_search_api_compatibility.yml | 506 +++++++- 6 files changed, 1103 insertions(+), 1246 deletions(-) delete mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java diff --git a/muted-tests.yml b/muted-tests.yml index 8b756adce545..4e1d6bbf4461 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -109,9 +109,6 @@ tests: - class: org.elasticsearch.xpack.ml.integration.MlJobIT method: testDeleteJobAsync issue: https://github.com/elastic/elasticsearch/issues/112212 -- class: org.elasticsearch.search.retriever.RankDocRetrieverBuilderIT - method: testRankDocsRetrieverWithCollapse - issue: https://github.com/elastic/elasticsearch/issues/112254 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/rest-api/watcher/put-watch/line_120} issue: https://github.com/elastic/elasticsearch/issues/99517 @@ -336,15 +333,9 @@ tests: - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformAgnosticVariant issue: https://github.com/elastic/elasticsearch/issues/113983 -- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT - method: test {yaml=rrf/700_rrf_retriever_search_api_compatibility/rrf retriever with top-level collapse} - issue: https://github.com/elastic/elasticsearch/issues/114019 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5WithTrainedModelAndInference issue: https://github.com/elastic/elasticsearch/issues/114023 -- class: org.elasticsearch.xpack.rank.rrf.RRFRetrieverBuilderIT - method: testRRFWithCollapse - issue: 
https://github.com/elastic/elasticsearch/issues/114074 - class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT method: testPutE5Small_withPlatformSpecificVariant issue: https://github.com/elastic/elasticsearch/issues/113950 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java deleted file mode 100644 index b78448bfd873..000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RankDocRetrieverBuilderIT.java +++ /dev/null @@ -1,756 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.search.retriever; - -import org.apache.lucene.search.TotalHits; -import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.util.SetOnce; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.MultiSearchRequest; -import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.TransportMultiSearchAction; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.Maps; -import org.elasticsearch.index.query.InnerHitBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.MockSearchService; -import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.builder.PointInTimeBuilder; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.collapse.CollapseBuilder; -import org.elasticsearch.search.rank.RankDoc; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.sort.NestedSortBuilder; -import org.elasticsearch.search.sort.ScoreSortBuilder; -import org.elasticsearch.search.sort.ShardDocSortField; -import org.elasticsearch.search.sort.SortBuilder; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; -import org.junit.Before; - -import java.io.IOException; -import 
java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; -import static org.hamcrest.Matchers.equalTo; - -public class RankDocRetrieverBuilderIT extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return List.of(MockSearchService.TestPlugin.class); - } - - public record RetrieverSource(RetrieverBuilder retriever, SearchSourceBuilder source) {} - - private static String INDEX = "test_index"; - private static final String ID_FIELD = "_id"; - private static final String DOC_FIELD = "doc"; - private static final String TEXT_FIELD = "text"; - private static final String VECTOR_FIELD = "vector"; - private static final String TOPIC_FIELD = "topic"; - private static final String LAST_30D_FIELD = "views.last30d"; - private static final String ALL_TIME_FIELD = "views.all"; - - @Before - public void setup() throws Exception { - String mapping = """ - { - "properties": { - "vector": { - "type": "dense_vector", - "dims": 3, - "element_type": "float", - "index": true, - "similarity": "l2_norm", - "index_options": { - "type": "hnsw" - } - }, - "text": { - "type": "text" - }, - "doc": { - "type": "keyword" - }, - "topic": { - "type": "keyword" - }, - "views": { - "type": "nested", - "properties": { - "last30d": { - "type": "integer" - }, - "all": { - "type": "integer" - } - } - } - } - } - """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); - admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); - indexDoc( - INDEX, - "doc_1", - DOC_FIELD, - "doc_1", - TOPIC_FIELD, - "technology", - TEXT_FIELD, - "the quick brown fox jumps over the lazy dog", - LAST_30D_FIELD, - 100 - ); 
- indexDoc( - INDEX, - "doc_2", - DOC_FIELD, - "doc_2", - TOPIC_FIELD, - "astronomy", - TEXT_FIELD, - "you know, for Search!", - VECTOR_FIELD, - new float[] { 1.0f, 2.0f, 3.0f }, - LAST_30D_FIELD, - 3 - ); - indexDoc(INDEX, "doc_3", DOC_FIELD, "doc_3", TOPIC_FIELD, "technology", VECTOR_FIELD, new float[] { 6.0f, 6.0f, 6.0f }); - indexDoc( - INDEX, - "doc_4", - DOC_FIELD, - "doc_4", - TOPIC_FIELD, - "technology", - TEXT_FIELD, - "aardvark is a really awesome animal, but not very quick", - ALL_TIME_FIELD, - 100, - LAST_30D_FIELD, - 40 - ); - indexDoc(INDEX, "doc_5", DOC_FIELD, "doc_5", TOPIC_FIELD, "science", TEXT_FIELD, "irrelevant stuff"); - indexDoc( - INDEX, - "doc_6", - DOC_FIELD, - "doc_6", - TEXT_FIELD, - "quick quick quick quick search", - VECTOR_FIELD, - new float[] { 10.0f, 30.0f, 100.0f }, - LAST_30D_FIELD, - 15 - ); - indexDoc( - INDEX, - "doc_7", - DOC_FIELD, - "doc_7", - TOPIC_FIELD, - "biology", - TEXT_FIELD, - "dog", - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - ALL_TIME_FIELD, - 1000 - ); - refresh(INDEX); - } - - public void testRankDocsRetrieverBasicWithPagination() { - final int rankWindowSize = 100; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - 
standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 3, 4, 7 and with pagination, we'd just omit the first result - source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ) - ); - // include some pagination as well - source.from(1); - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_3")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_7")); - }); - } - - public void testRankDocsRetrieverWithAggs() { - // same as above, but we only want to bring back the top result from each subsearch - // so that would be 1, 2, and 7 - // and final rank would be (based on score): 2, 1, 7 - // aggs should still account for the same docs as the testRankDocsRetriever test, i.e. 
all but doc_5 - final int rankWindowSize = 1; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ) - ); - source.size(1); - source.aggregation(new TermsAggregationBuilder("topic").field(TOPIC_FIELD)); - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getHits().length, equalTo(1)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); - assertNotNull(resp.getAggregations()); - 
assertNotNull(resp.getAggregations().get("topic")); - Terms terms = resp.getAggregations().get("topic"); - // doc_3 is not part of the final aggs computation as it is only retrieved through the knn retriever - // and is outside of the rank window - assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(2L)); - assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); - assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); - }); - } - - public void testRankDocsRetrieverWithCollapse() { - final int rankWindowSize = 100; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 3, 4, 7 - // with collapsing on topic field we would have 6, 2, 1, 7 - 
source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ) - ); - source.collapse( - new CollapseBuilder(TOPIC_FIELD).setInnerHits( - new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) - ) - ); - source.fetchField(TOPIC_FIELD); - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getHits().length, equalTo(4)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(1).field(TOPIC_FIELD).getValue().toString(), equalTo("astronomy")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).field(TOPIC_FIELD).getValue().toString(), equalTo("technology")); - assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(0).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(1).getId(), equalTo("doc_3")); - assertThat(resp.getHits().getAt(2).getInnerHits().get("a").getAt(2).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_7")); - assertThat(resp.getHits().getAt(3).field(TOPIC_FIELD).getValue().toString(), equalTo("biology")); - }); - } - - public void testRankDocsRetrieverWithNestedCollapseAndAggs() { - final int rankWindowSize = 10; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves 
docs 1 and 6 as doc_4 is collapsed to doc_1 - standard0.queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); - standard0.collapseBuilder = new CollapseBuilder(TOPIC_FIELD).setInnerHits( - new InnerHitBuilder("a").addSort(new FieldSortBuilder(DOC_FIELD).order(SortOrder.DESC)).setSize(10) - ); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 3, 4, 7 - source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ) - ); - source.aggregation(new TermsAggregationBuilder("topic").field(TOPIC_FIELD)); - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, 
equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); - assertNotNull(resp.getAggregations()); - assertNotNull(resp.getAggregations().get("topic")); - Terms terms = resp.getAggregations().get("topic"); - // doc_3 is not part of the final aggs computation as it is only retrieved through the knn retriever - // and is outside of the rank window - assertThat(terms.getBucketByKey("technology").getDocCount(), equalTo(3L)); - assertThat(terms.getBucketByKey("astronomy").getDocCount(), equalTo(1L)); - assertThat(terms.getBucketByKey("biology").getDocCount(), equalTo(1L)); - }); - } - - public void testRankDocsRetrieverWithNestedQuery() { - final int rankWindowSize = 100; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gt(10L), ScoreMode.Avg); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 3, 4, 7 - source.retriever( - new CompoundRetrieverWithRankDocs( - 
rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ) - ); - source.fetchField(TOPIC_FIELD); - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_3")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_7")); - }); - } - - public void testRankDocsRetrieverMultipleCompoundRetrievers() { - final int rankWindowSize = 100; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, and 6 - standard0.queryBuilder = QueryBuilders.boolQuery() - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_1")).boost(10L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_4")).boost(9L)) - .should(QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_6")).boost(8L)); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 2 and 6 due to prefilter - standard1.queryBuilder = QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(ID_FIELD, "doc_2", "doc_3", "doc_6")).boost(20L); - standard1.preFilterQueryBuilders.add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 7, 2, 
3, and 6 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder( - VECTOR_FIELD, - new float[] { 3.0f, 3.0f, 3.0f }, - null, - 10, - 100, - null - ); - // the compound retriever here produces a score for a doc based on the percentage of the queries that it was matched on and - // resolves ties based on actual score, rank, and then the doc (we're forcing 1 shard for consistent results) - // so ideal rank would be: 6, 2, 1, 4, 7, 3 - CompoundRetrieverWithRankDocs compoundRetriever1 = new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList( - new RetrieverSource(standard0, null), - new RetrieverSource(standard1, null), - new RetrieverSource(knnRetrieverBuilder, null) - ) - ); - // simple standard retriever that would have the doc_4 as its first (and only) result - StandardRetrieverBuilder standard2 = new StandardRetrieverBuilder(); - standard2.queryBuilder = QueryBuilders.queryStringQuery("aardvark").defaultField(TEXT_FIELD); - - // combining the two retrievers would bring doc_4 at the top as it would be the only one present in both doc sets - // the rest of the docs would be sorted based on their ranks as they have the same score (1/2) - source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList(new RetrieverSource(compoundRetriever1, null), new RetrieverSource(standard2, null)) - ) - ); - - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_3")); 
- assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(5).getId(), equalTo("doc_7")); - }); - } - - public void testRankDocsRetrieverDifferentNestedSorting() { - final int rankWindowSize = 100; - SearchSourceBuilder source = new SearchSourceBuilder(); - StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder(); - // this one retrieves docs 1, 4, 6, 2 - standard0.queryBuilder = QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gt(0), ScoreMode.Avg); - standard0.sortBuilders = List.of( - new FieldSortBuilder(LAST_30D_FIELD).setNestedSort(new NestedSortBuilder("views")).order(SortOrder.DESC) - ); - StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder(); - // this one retrieves docs 4, 7 - standard1.queryBuilder = QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(ALL_TIME_FIELD).gt(0), ScoreMode.Avg); - standard1.sortBuilders = List.of( - new FieldSortBuilder(ALL_TIME_FIELD).setNestedSort(new NestedSortBuilder("views")).order(SortOrder.ASC) - ); - - source.retriever( - new CompoundRetrieverWithRankDocs( - rankWindowSize, - Arrays.asList(new RetrieverSource(standard0, null), new RetrieverSource(standard1, null)) - ) - ); - - SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); - ElasticsearchAssertions.assertResponse(req, resp -> { - assertNull(resp.pointInTimeId()); - assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_4")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(4).getId(), equalTo("doc_7")); - }); - } - - class CompoundRetrieverWithRankDocs extends 
RetrieverBuilder { - - private final List sources; - private final int rankWindowSize; - - private CompoundRetrieverWithRankDocs(int rankWindowSize, List sources) { - this.rankWindowSize = rankWindowSize; - this.sources = Collections.unmodifiableList(sources); - } - - @Override - public boolean isCompound() { - return true; - } - - @Override - public QueryBuilder topDocsQuery() { - throw new UnsupportedOperationException("should not be called"); - } - - @Override - public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { - if (ctx.getPointInTimeBuilder() == null) { - throw new IllegalStateException("PIT is required"); - } - - // Rewrite prefilters - boolean hasChanged = false; - var newPreFilters = rewritePreFilters(ctx); - hasChanged |= newPreFilters != preFilterQueryBuilders; - - // Rewrite retriever sources - List newRetrievers = new ArrayList<>(); - for (var entry : sources) { - RetrieverBuilder newRetriever = entry.retriever.rewrite(ctx); - if (newRetriever != entry.retriever) { - newRetrievers.add(new RetrieverSource(newRetriever, null)); - hasChanged |= newRetriever != entry.retriever; - } else if (newRetriever == entry.retriever) { - var sourceBuilder = entry.source != null - ? 
entry.source - : createSearchSourceBuilder(ctx.getPointInTimeBuilder(), newRetriever); - var rewrittenSource = sourceBuilder.rewrite(ctx); - newRetrievers.add(new RetrieverSource(newRetriever, rewrittenSource)); - hasChanged |= rewrittenSource != entry.source; - } - } - if (hasChanged) { - return new CompoundRetrieverWithRankDocs(rankWindowSize, newRetrievers); - } - - // execute searches - final SetOnce results = new SetOnce<>(); - final MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); - for (var entry : sources) { - SearchRequest searchRequest = new SearchRequest().source(entry.source); - // The can match phase can reorder shards, so we disable it to ensure the stable ordering - searchRequest.setPreFilterShardSize(Integer.MAX_VALUE); - multiSearchRequest.add(searchRequest); - } - ctx.registerAsyncAction((client, listener) -> { - client.execute(TransportMultiSearchAction.TYPE, multiSearchRequest, new ActionListener<>() { - @Override - public void onResponse(MultiSearchResponse items) { - List topDocs = new ArrayList<>(); - for (int i = 0; i < items.getResponses().length; i++) { - var item = items.getResponses()[i]; - var rankDocs = getRankDocs(item.getResponse()); - sources.get(i).retriever().setRankDocs(rankDocs); - topDocs.add(rankDocs); - } - results.set(combineResults(topDocs)); - listener.onResponse(null); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - }); - - return new RankDocsRetrieverBuilder( - rankWindowSize, - newRetrievers.stream().map(s -> s.retriever).toList(), - results::get, - newPreFilters - ); - } - - @Override - public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { - throw new UnsupportedOperationException("should not be called"); - } - - @Override - public String getName() { - return "compound_retriever"; - } - - @Override - protected void doToXContent(XContentBuilder builder, Params params) throws IOException { - - } - - 
@Override - protected boolean doEquals(Object o) { - return false; - } - - @Override - protected int doHashCode() { - return 0; - } - - private RankDoc[] getRankDocs(SearchResponse searchResponse) { - assert searchResponse != null; - int size = Math.min(rankWindowSize, searchResponse.getHits().getHits().length); - RankDoc[] docs = new RankDoc[size]; - for (int i = 0; i < size; i++) { - var hit = searchResponse.getHits().getAt(i); - long sortValue = (long) hit.getRawSortValues()[hit.getRawSortValues().length - 1]; - int doc = ShardDocSortField.decodeDoc(sortValue); - int shardRequestIndex = ShardDocSortField.decodeShardRequestIndex(sortValue); - docs[i] = new RankDoc(doc, hit.getScore(), shardRequestIndex); - docs[i].rank = i + 1; - } - return docs; - } - - record RankDocAndHitRatio(RankDoc rankDoc, float hitRatio) {} - - /** - * Combines the provided {@code rankResults} to return the final top documents. - */ - public RankDoc[] combineResults(List rankResults) { - int totalQueries = rankResults.size(); - final float step = 1.0f / totalQueries; - Map docsToRankResults = Maps.newMapWithExpectedSize(rankWindowSize); - for (var rankResult : rankResults) { - for (RankDoc scoreDoc : rankResult) { - docsToRankResults.compute(new RankDoc.RankKey(scoreDoc.doc, scoreDoc.shardIndex), (key, value) -> { - if (value == null) { - RankDoc res = new RankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex); - res.rank = scoreDoc.rank; - return new RankDocAndHitRatio(res, step); - } else { - RankDoc res = new RankDoc(scoreDoc.doc, Math.max(scoreDoc.score, value.rankDoc.score), scoreDoc.shardIndex); - res.rank = Math.min(scoreDoc.rank, value.rankDoc.rank); - return new RankDocAndHitRatio(res, value.hitRatio + step); - } - }); - } - } - // sort the results based on hit ratio, then doc, then rank, and final tiebreaker is based on smaller doc id - RankDocAndHitRatio[] sortedResults = docsToRankResults.values().toArray(RankDocAndHitRatio[]::new); - Arrays.sort(sortedResults, 
(RankDocAndHitRatio doc1, RankDocAndHitRatio doc2) -> { - if (doc1.hitRatio != doc2.hitRatio) { - return doc1.hitRatio < doc2.hitRatio ? 1 : -1; - } - if (false == (Float.isNaN(doc1.rankDoc.score) || Float.isNaN(doc2.rankDoc.score)) - && (doc1.rankDoc.score != doc2.rankDoc.score)) { - return doc1.rankDoc.score < doc2.rankDoc.score ? 1 : -1; - } - if (doc1.rankDoc.rank != doc2.rankDoc.rank) { - return doc1.rankDoc.rank < doc2.rankDoc.rank ? -1 : 1; - } - return doc1.rankDoc.doc < doc2.rankDoc.doc ? -1 : 1; - }); - // trim the results if needed, otherwise each shard will always return `rank_window_size` results. - // pagination and all else will happen on the coordinator when combining the shard responses - RankDoc[] topResults = new RankDoc[Math.min(rankWindowSize, sortedResults.length)]; - for (int rank = 0; rank < topResults.length; ++rank) { - topResults[rank] = sortedResults[rank].rankDoc; - topResults[rank].rank = rank + 1; - topResults[rank].score = sortedResults[rank].hitRatio; - } - return topResults; - } - } - - private SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, RetrieverBuilder retrieverBuilder) { - var sourceBuilder = new SearchSourceBuilder().pointInTimeBuilder(pit).trackTotalHits(false).size(100); - retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, false); - - // Record the shard id in the sort result - List> sortBuilders = sourceBuilder.sorts() != null ? 
new ArrayList<>(sourceBuilder.sorts()) : new ArrayList<>(); - if (sortBuilders.isEmpty()) { - sortBuilders.add(new ScoreSortBuilder()); - } - sortBuilders.add(new FieldSortBuilder(FieldSortBuilder.SHARD_DOC_FIELD_NAME)); - sourceBuilder.sort(sortBuilders); - return sourceBuilder; - } -} diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index 2e7bc44811bf..be64d34dc876 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -33,7 +33,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -98,7 +97,7 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { } } """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); + createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term"); indexDoc( @@ -167,8 +166,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves 
docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -214,8 +213,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -266,8 +265,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -320,8 +319,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); 
standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -383,8 +382,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -446,8 +445,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); source.retriever( new RRFRetrieverBuilder( Arrays.asList( @@ -474,13 +473,12 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { 
assertThat(resp.getHits().getAt(0).getExplanation().getDetails().length, equalTo(2)); var rrfDetails = resp.getHits().getAt(0).getExplanation().getDetails()[0]; assertThat(rrfDetails.getDetails().length, equalTo(3)); - assertThat(rrfDetails.getDescription(), containsString("computed for initial ranks [2, 1, 2]")); + assertThat(rrfDetails.getDescription(), containsString("computed for initial ranks [2, 1, 1]")); - assertThat(rrfDetails.getDetails()[0].getDescription(), containsString("for rank [2] in query at index [0]")); assertThat(rrfDetails.getDetails()[0].getDescription(), containsString("for rank [2] in query at index [0]")); assertThat(rrfDetails.getDetails()[0].getDescription(), containsString("[my_custom_retriever]")); assertThat(rrfDetails.getDetails()[1].getDescription(), containsString("for rank [1] in query at index [1]")); - assertThat(rrfDetails.getDetails()[2].getDescription(), containsString("for rank [2] in query at index [2]")); + assertThat(rrfDetails.getDetails()[2].getDescription(), containsString("for rank [1] in query at index [2]")); }); } @@ -503,8 +501,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { QueryBuilders.constantScoreQuery(QueryBuilders.idsQuery().addIds("doc_2", "doc_3", "doc_6")).boost(20L) ); standard1.getPreFilterQueryBuilders().add(QueryBuilders.queryStringQuery("search").defaultField(TEXT_FIELD)); - // this one retrieves docs 3, 2, 6, and 7 - KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 4.0f }, null, 10, 100, null); + // this one retrieves docs 2, 3, 6, and 7 + KnnRetrieverBuilder knnRetrieverBuilder = new KnnRetrieverBuilder(VECTOR_FIELD, new float[] { 2.0f }, null, 10, 100, null); RRFRetrieverBuilder nestedRRF = new RRFRetrieverBuilder( Arrays.asList( diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java 
b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index 512874e5009f..ea251917cfae 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -12,6 +12,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; import org.elasticsearch.search.retriever.KnnRetrieverBuilder; @@ -21,8 +22,9 @@ import org.elasticsearch.xcontent.XContentType; import java.util.Arrays; -import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; public class RRFRetrieverBuilderNestedDocsIT extends RRFRetrieverBuilderIT { @@ -68,7 +70,7 @@ public class RRFRetrieverBuilderNestedDocsIT extends RRFRetrieverBuilderIT { } } """; - createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build()); + createIndex(INDEX, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)).build()); admin().indices().preparePutMapping(INDEX).setSource(mapping, XContentType.JSON).get(); indexDoc(INDEX, "doc_1", DOC_FIELD, "doc_1", TOPIC_FIELD, "technology", TEXT_FIELD, "term", LAST_30D_FIELD, 100); indexDoc( @@ -134,9 +136,9 @@ public class RRFRetrieverBuilderNestedDocsIT extends 
RRFRetrieverBuilderIT { final int rankWindowSize = 100; final int rankConstant = 10; SearchSourceBuilder source = new SearchSourceBuilder(); - // this one retrieves docs 1, 4 + // this one retrieves docs 1 StandardRetrieverBuilder standard0 = new StandardRetrieverBuilder( - QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gte(30L), ScoreMode.Avg) + QueryBuilders.nestedQuery("views", QueryBuilders.rangeQuery(LAST_30D_FIELD).gte(50L), ScoreMode.Avg) ); // this one retrieves docs 2 and 6 due to prefilter StandardRetrieverBuilder standard1 = new StandardRetrieverBuilder( @@ -157,16 +159,21 @@ public class RRFRetrieverBuilderNestedDocsIT extends RRFRetrieverBuilderIT { ) ); source.fetchField(TOPIC_FIELD); + source.explain(true); SearchRequestBuilder req = client().prepareSearch(INDEX).setSource(source); ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(4L)); + assertThat(resp.getHits().getTotalHits().value, equalTo(3L)); assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); - assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_1")); - assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_2")); - assertThat(resp.getHits().getAt(3).getId(), equalTo("doc_4")); + assertThat((double) resp.getHits().getAt(0).getScore(), closeTo(0.1742, 1e-4)); + assertThat( + Arrays.stream(resp.getHits().getHits()).skip(1).map(SearchHit::getId).toList(), + containsInAnyOrder("doc_1", "doc_2") + ); + assertThat((double) resp.getHits().getAt(1).getScore(), closeTo(0.0909, 1e-4)); + assertThat((double) resp.getHits().getAt(2).getScore(), closeTo(0.0909, 1e-4)); }); } } diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml 
b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml index 47ba3658bb38..d5d7a5de1dc7 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/350_rrf_retriever_pagination.yml @@ -1,6 +1,8 @@ setup: - skip: - features: close_to + features: + - close_to + - contains - requires: cluster_features: 'rrf_retriever_composition_supported' @@ -10,8 +12,6 @@ setup: indices.create: index: test body: - settings: - number_of_shards: 1 mappings: properties: number_val: @@ -81,35 +81,49 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "1", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 10.0 + } + },{ + constant_score: { + filter: { + term: { + number_val: { + value: "2" + } + } + }, + boost: 9.0 + } }, + { + constant_score: { + filter: { + term: { + number_val: { + value: "3" + } + } + }, + boost: 8.0 } }, { - term: { - number_val: { - value: "2", - boost: 9.0 - } - } - }, - { - term: { - number_val: { - value: "3", - boost: 8.0 - } - } - }, - { - term: { - number_val: { - value: "4", - boost: 7.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "4" + } + } + }, + boost: 7.0 } } ] @@ -124,35 +138,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "A", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "B", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "C", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "D", - boost: 7.0 - } + constant_score: { + filter: { + term: 
{ + char_val: { + value: "D" + } + } + }, + boost: 7.0 } } ] @@ -198,35 +228,49 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "1", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 10.0 + } + },{ + constant_score: { + filter: { + term: { + number_val: { + value: "2" + } + } + }, + boost: 9.0 + } }, + { + constant_score: { + filter: { + term: { + number_val: { + value: "3" + } + } + }, + boost: 8.0 } }, { - term: { - number_val: { - value: "2", - boost: 9.0 - } - } - }, - { - term: { - number_val: { - value: "3", - boost: 8.0 - } - } - }, - { - term: { - number_val: { - value: "4", - boost: 7.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "4" + } + } + }, + boost: 7.0 } } ] @@ -241,35 +285,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "A", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "B", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "C", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "D", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 7.0 } } ] @@ -306,35 +366,49 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "1", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 10.0 + } + },{ + constant_score: { + filter: { + term: { + number_val: { + value: "2" + } + } + }, + boost: 9.0 + } }, + { + constant_score: { + filter: { + term: { + number_val: { + value: "3" + } + } + }, + boost: 8.0 } }, { - term: { - number_val: { - value: "2", - boost: 9.0 - } - } - }, - { - term: { - number_val: { - value: 
"3", - boost: 8.0 - } - } - }, - { - term: { - number_val: { - value: "4", - boost: 7.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "4" + } + } + }, + boost: 7.0 } } ] @@ -349,35 +423,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "A", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "B", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "C", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "D", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 7.0 } } ] @@ -422,35 +512,49 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "1", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 10.0 + } + },{ + constant_score: { + filter: { + term: { + number_val: { + value: "2" + } + } + }, + boost: 9.0 + } }, + { + constant_score: { + filter: { + term: { + number_val: { + value: "3" + } + } + }, + boost: 8.0 } }, { - term: { - number_val: { - value: "2", - boost: 9.0 - } - } - }, - { - term: { - number_val: { - value: "3", - boost: 8.0 - } - } - }, - { - term: { - number_val: { - value: "4", - boost: 7.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "4" + } + } + }, + boost: 7.0 } } ] @@ -465,35 +569,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: 
"A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -533,35 +653,49 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "1", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 10.0 + } + },{ + constant_score: { + filter: { + term: { + number_val: { + value: "2" + } + } + }, + boost: 9.0 + } }, + { + constant_score: { + filter: { + term: { + number_val: { + value: "3" + } + } + }, + boost: 8.0 } }, { - term: { - number_val: { - value: "2", - boost: 9.0 - } - } - }, - { - term: { - number_val: { - value: "3", - boost: 8.0 - } - } - }, - { - term: { - number_val: { - value: "4", - boost: 7.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "4" + } + } + }, + boost: 7.0 } } ] @@ -576,35 +710,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -632,9 +782,9 @@ setup: "Pagination within interleaved results, different result set sizes, rank_window_size covering all results": # perform multiple searches with different "from" parameter, ensuring that results are consistent # rank_window_size covers the entire result set for both queries, so pagination 
should be consistent - # queryA has a result set of [5, 1] and + # queryA has a result set of [1] and # queryB has a result set of [4, 3, 1, 2] - # so for rank_constant=10, the expected order is [1, 4, 5, 3, 2] + # so for rank_constant=10, the expected order is [1, 4, 3, 2] - do: search: index: test @@ -645,19 +795,11 @@ setup: { retrievers: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] standard: { query: { bool: { should: [ - { - term: { - number_val: { - value: "5", - boost: 10.0 - } - } - }, { term: { number_val: { @@ -678,35 +820,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -721,11 +879,11 @@ setup: from : 0 size : 2 - - match: { hits.total.value : 5 } + - match: { hits.total.value : 4 } - length: { hits.hits : 2 } - match: { hits.hits.0._id: "1" } # score for doc 1 is (1/12 + 1/13) - - close_to: {hits.hits.0._score: {value: 0.1602, error: 0.001}} + - close_to: {hits.hits.0._score: {value: 0.1678, error: 0.001}} - match: { hits.hits.1._id: "4" } # score for doc 4 is (1/11) - close_to: {hits.hits.1._score: {value: 0.0909, error: 0.001}} @@ -740,19 +898,11 @@ setup: { retrievers: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] standard: { query: { bool: { should: [ - { - term: { - number_val: { - value: "5", - 
boost: 10.0 - } - } - }, { term: { number_val: { @@ -773,35 +923,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -816,14 +982,15 @@ setup: from : 2 size : 2 - - match: { hits.total.value : 5 } + - match: { hits.total.value : 4 } - length: { hits.hits : 2 } - - match: { hits.hits.0._id: "5" } - # score for doc 5 is (1/11) - - close_to: {hits.hits.0._score: {value: 0.0909, error: 0.001}} - - match: { hits.hits.1._id: "3" } + - match: { hits.hits.0._id: "3" } # score for doc 3 is (1/12) - - close_to: {hits.hits.1._score: {value: 0.0833, error: 0.001}} + - close_to: {hits.hits.0._score: {value: 0.0833, error: 0.001}} + - match: { hits.hits.1._id: "2" } + # score for doc 2 is (1/14) + - close_to: {hits.hits.1._score: {value: 0.0714, error: 0.001}} + - do: search: @@ -835,19 +1002,11 @@ setup: { retrievers: [ { - # this should clause would generate the result set [5, 1] + # this should clause would generate the result set [1] standard: { query: { bool: { should: [ - { - term: { - number_val: { - value: "5", - boost: 10.0 - } - } - }, { term: { number_val: { @@ -868,35 +1027,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + 
char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -911,12 +1086,8 @@ setup: from: 4 size: 2 - - match: { hits.total.value: 5 } - - length: { hits.hits: 1 } - - match: { hits.hits.0._id: "2" } - # score for doc 2 is (1/14) - - close_to: {hits.hits.0._score: {value: 0.0714, error: 0.001}} - + - match: { hits.total.value: 4 } + - length: { hits.hits: 0 } --- "Pagination within interleaved results, different result set sizes, rank_window_size not covering all results": @@ -943,19 +1114,27 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "5", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "5" + } + } + }, + boost: 10.0 } }, { - term: { - number_val: { - value: "1", - boost: 9.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 9.0 } } ] @@ -970,35 +1149,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] @@ -1015,11 +1210,11 @@ setup: - match: { hits.total.value : 5 } - length: { hits.hits : 2 } - - match: { hits.hits.0._id: "4" } - # score for doc 4 is 
(1/11) + - contains: { hits.hits: { _id: "4" } } + - contains: { hits.hits: { _id: "5" } } + + # both docs have the same score (1/11) - close_to: {hits.hits.0._score: {value: 0.0909, error: 0.001}} - - match: { hits.hits.1._id: "5" } - # score for doc 5 is (1/11) - close_to: {hits.hits.1._score: {value: 0.0909, error: 0.001}} - do: @@ -1038,19 +1233,27 @@ setup: bool: { should: [ { - term: { - number_val: { - value: "5", - boost: 10.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "5" + } + } + }, + boost: 10.0 } }, { - term: { - number_val: { - value: "1", - boost: 9.0 - } + constant_score: { + filter: { + term: { + number_val: { + value: "1" + } + } + }, + boost: 9.0 } } ] @@ -1065,35 +1268,51 @@ setup: bool: { should: [ { - term: { - char_val: { - value: "D", - boost: 10.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "D" + } + } + }, + boost: 10.0 } }, { - term: { - char_val: { - value: "C", - boost: 9.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "C" + } + } + }, + boost: 9.0 } }, { - term: { - char_val: { - value: "A", - boost: 8.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "A" + } + } + }, + boost: 8.0 } }, { - term: { - char_val: { - value: "B", - boost: 7.0 - } + constant_score: { + filter: { + term: { + char_val: { + value: "B" + } + } + }, + boost: 7.0 } } ] diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml index 1f7125377b89..517c162c33e9 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/700_rrf_retriever_search_api_compatibility.yml @@ -1,7 +1,6 @@ setup: - skip: features: close_to - - requires: 
cluster_features: 'rrf_retriever_composition_supported' reason: 'test requires rrf retriever composition support' @@ -10,8 +9,6 @@ setup: indices.create: index: test body: - settings: - number_of_shards: 1 mappings: properties: text: @@ -42,7 +39,7 @@ setup: index: test id: "1" body: - text: "term term term term term term term term term" + text: "term1" vector: [1.0] - do: @@ -50,7 +47,7 @@ setup: index: test id: "2" body: - text: "term term term term term term term term" + text: "term2" text_to_highlight: "search for the truth" keyword: "biology" vector: [2.0] @@ -60,8 +57,8 @@ setup: index: test id: "3" body: - text: "term term term term term term term" - text_to_highlight: "nothing related but still a match" + text: "term3" + text_to_highlight: "nothing related" keyword: "technology" vector: [3.0] @@ -70,14 +67,14 @@ setup: index: test id: "4" body: - text: "term term term term term term" + text: "term4" vector: [4.0] - do: index: index: test id: "5" body: - text: "term term term term term" + text: "term5" text_to_highlight: "You know, for Search!" 
keyword: "technology" integer: 5 @@ -87,7 +84,7 @@ setup: index: test id: "6" body: - text: "term term term term" + text: "term6" keyword: "biology" integer: 6 vector: [6.0] @@ -96,27 +93,26 @@ setup: index: test id: "7" body: - text: "term term term" + text: "term7" keyword: "astronomy" - vector: [7.0] + vector: [77.0] nested: { views: 50} - do: index: index: test id: "8" body: - text: "term term" + text: "term8" keyword: "technology" - vector: [8.0] nested: { views: 100} - do: index: index: test id: "9" body: - text: "term" + text: "term9" + integer: 2 keyword: "technology" - vector: [9.0] nested: { views: 10} - do: indices.refresh: {} @@ -133,6 +129,7 @@ setup: rrf: retrievers: [ { + # this one retrieves docs 6, 5, 4 knn: { field: vector, query_vector: [ 6.0 ], @@ -141,10 +138,72 @@ setup: } }, { + # this one retrieves docs 4, 5, 1, 2, 6 standard: { query: { - term: { - text: term + bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term4 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term5 + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term1 + } + }, + boost: 8.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term2 + } + }, + boost: 7.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term6 + } + }, + boost: 6.0 + } + }, + { + constant_score: { + filter: { + exists: { + field: text + } + }, + boost: 1 + } + } + ] } } } @@ -158,9 +217,13 @@ setup: terms: field: keyword - - match: { hits.hits.0._id: "5" } - - match: { hits.hits.1._id: "1" } + + - match: { hits.hits.0._id: "4" } + - close_to: { hits.hits.0._score: { value: 0.1678, error: 0.001 } } + - match: { hits.hits.1._id: "5" } + - close_to: { hits.hits.1._score: { value: 0.1666, error: 0.001 } } - match: { hits.hits.2._id: "6" } + - close_to: { hits.hits.2._score: { value: 0.1575, error: 0.001 } } - match: { aggregations.keyword_aggs.buckets.0.key: "technology" } - match: { 
aggregations.keyword_aggs.buckets.0.doc_count: 4 } @@ -181,6 +244,7 @@ setup: rrf: retrievers: [ { + # this one retrieves docs 6, 5, 4 knn: { field: vector, query_vector: [ 6.0 ], @@ -189,10 +253,72 @@ setup: } }, { + # this one retrieves docs 4, 5, 1, 2, 6 standard: { query: { - term: { - text: term + bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term4 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term5 + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term1 + } + }, + boost: 8.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term2 + } + }, + boost: 7.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term6 + } + }, + boost: 6.0 + } + }, + { + constant_score: { + filter: { + exists: { + field: text + } + }, + boost: 1 + } + } + ] } } } @@ -208,12 +334,14 @@ setup: lang: painless source: "_score" - - - match: { hits.hits.0._id: "5" } - - match: { hits.hits.1._id: "1" } + - match: { hits.hits.0._id: "4" } + - close_to: { hits.hits.0._score: { value: 0.1678, error: 0.001 } } + - match: { hits.hits.1._id: "5" } + - close_to: { hits.hits.1._score: { value: 0.1666, error: 0.001 } } - match: { hits.hits.2._id: "6" } + - close_to: { hits.hits.2._score: { value: 0.1575, error: 0.001 } } - - close_to: { aggregations.max_score.value: { value: 0.15, error: 0.001 }} + - close_to: { aggregations.max_score.value: { value: 0.1678, error: 0.001 }} --- "rrf retriever with top-level collapse": @@ -228,6 +356,7 @@ setup: rrf: retrievers: [ { + # this one retrieves docs 6, 5, 4 knn: { field: vector, query_vector: [ 6.0 ], @@ -236,10 +365,72 @@ setup: } }, { + # this one retrieves docs 4, 5, 1, 2, 6 standard: { query: { - term: { - text: term + bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term4 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term5 + } + }, + boost: 9.0 + } + }, + { + 
constant_score: { + filter: { + term: { + text: term1 + } + }, + boost: 8.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term2 + } + }, + boost: 7.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term6 + } + }, + boost: 6.0 + } + }, + { + constant_score: { + filter: { + exists: { + field: text + } + }, + boost: 1 + } + } + ] } } } @@ -250,18 +441,23 @@ setup: size: 3 collapse: { field: keyword, inner_hits: { name: sub_hits, size: 2 } } - - match: { hits.hits.0._id: "5" } - - match: { hits.hits.1._id: "1" } + - match: { hits.total : 9 } + + - match: { hits.hits.0._id: "4" } + - close_to: { hits.hits.0._score: { value: 0.1678, error: 0.001 } } + - match: { hits.hits.1._id: "5" } + - close_to: { hits.hits.1._score: { value: 0.1666, error: 0.001 } } - match: { hits.hits.2._id: "6" } + - close_to: { hits.hits.2._score: { value: 0.1575, error: 0.001 } } - - match: { hits.hits.0.inner_hits.sub_hits.hits.total : 4 } - length: { hits.hits.0.inner_hits.sub_hits.hits.hits : 2 } - - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "5" } - - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.1._id: "3" } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.0._id: "4" } + - match: { hits.hits.0.inner_hits.sub_hits.hits.hits.1._id: "1" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.total : 4 } - length: { hits.hits.1.inner_hits.sub_hits.hits.hits : 2 } - - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "1" } - - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "4" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.0._id: "5" } + - match: { hits.hits.1.inner_hits.sub_hits.hits.hits.1._id: "3" } - length: { hits.hits.2.inner_hits.sub_hits.hits.hits: 2 } - match: { hits.hits.2.inner_hits.sub_hits.hits.hits.0._id: "6" } @@ -280,18 +476,132 @@ setup: rrf: retrievers: [ { - knn: { - field: vector, - query_vector: [ 6.0 ], - k: 3, - num_candidates: 10 + # this one retrieves docs 7, 3 + standard: { + query: { + 
bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term7 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term3 + } }, + boost: 9.0 + } + } + ] + } + } } }, { + # this one retrieves docs 1, 2, 3, 7 standard: { query: { - term: { - text: term + bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term1 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term2 + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term3 + } + }, + boost: 8.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term4 + } + }, + boost: 7.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term5 + } + }, + boost: 6.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term6 + } + }, + boost: 5.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term7 + } + }, + boost: 4.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term8 + } + }, + boost: 3.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term9 + } + }, + boost: 2.0 + } + } + ] } }, collapse: { field: keyword, inner_hits: { name: sub_hits, size: 1 } } @@ -303,8 +613,9 @@ setup: size: 3 - match: { hits.hits.0._id: "7" } - - match: { hits.hits.1._id: "1" } - - match: { hits.hits.2._id: "6" } + - close_to: { hits.hits.0._score: { value: 0.1623, error: 0.001 } } + - match: { hits.hits.1._id: "3" } + - close_to: { hits.hits.1._score: { value: 0.1602, error: 0.001 } } --- "rrf retriever highlighting results": @@ -331,7 +642,7 @@ setup: standard: { query: { term: { - keyword: technology + text: term5 } } } @@ -349,7 +660,7 @@ setup: } } - - match: { hits.total : 5 } + - match: { hits.total : 2 } - match: { hits.hits.0._id: "5" } - match: { hits.hits.0.highlight.text_to_highlight.0: "You know, for Search!" 
} @@ -357,9 +668,6 @@ setup: - match: { hits.hits.1._id: "2" } - match: { hits.hits.1.highlight.text_to_highlight.0: "search for the truth" } - - match: { hits.hits.2._id: "3" } - - not_exists: hits.hits.2.highlight - --- "rrf retriever with custom nested sort": @@ -374,12 +682,103 @@ setup: retrievers: [ { # this one retrievers docs 1, 2, 3, .., 9 - # but due to sorting, it will revert the order to 6, 5, .., 9 which due to + # but due to sorting, it will revert the order to 6, 5, 9, ... which due to # rank_window_size: 2 will only return 6 and 5 standard: { query: { - term: { - text: term + bool: { + should: [ + { + constant_score: { + filter: { + term: { + text: term1 + } + }, + boost: 10.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term2 + } + }, + boost: 9.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term3 + } + }, + boost: 8.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term4 + } + }, + boost: 7.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term5 + } + }, + boost: 6.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term6 + } + }, + boost: 5.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term7 + } + }, + boost: 4.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term8 + } + }, + boost: 3.0 + } + }, + { + constant_score: { + filter: { + term: { + text: term9 + } + }, + boost: 2.0 + } + } + ] } }, sort: [ @@ -410,7 +809,6 @@ setup: - length: {hits.hits: 2 } - match: { hits.hits.0._id: "6" } - - match: { hits.hits.1._id: "2" } --- "rrf retriever with nested query": @@ -427,7 +825,7 @@ setup: { knn: { field: vector, - query_vector: [ 7.0 ], + query_vector: [ 77.0 ], k: 1, num_candidates: 3 } From b76905939820dbf9340cdc4bd66d117645c83ffb Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 7 Oct 2024 14:31:47 -0300 Subject: [PATCH 14/85] IPinfo ASN and Country (#114192) Adding the building blocks to support IPinfo ASN and Country data --- 
.../elasticsearch/ingest/geoip/Database.java | 19 +- .../ingest/geoip/IpDataLookupFactories.java | 3 +- .../ingest/geoip/IpinfoIpDataLookups.java | 235 ++++++++++++++++++ .../ingest/geoip/GeoIpProcessorTests.java | 13 +- .../geoip/IpinfoIpDataLookupsTests.java | 223 +++++++++++++++++ .../ingest/geoip/MaxMindSupportTests.java | 6 + .../src/test/resources/ipinfo/asn_sample.mmdb | Bin 0 -> 25210 bytes .../test/resources/ipinfo/ip_asn_sample.mmdb | Bin 0 -> 23456 bytes .../resources/ipinfo/ip_country_sample.mmdb | Bin 0 -> 32292 bytes 9 files changed, 495 insertions(+), 4 deletions(-) create mode 100644 modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java create mode 100644 modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_asn_sample.mmdb create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index 52ca5eea52c1..31d7a43e3869 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -22,6 +22,10 @@ import java.util.Set; *

* A database has a set of properties that are valid to use with it (see {@link Database#properties()}), * as well as a list of default properties to use if no properties are specified (see {@link Database#defaultProperties()}). + *

+ * Some database providers have similar concepts but might have slightly different properties associated with those types. + * This can be accommodated, for example, by having a Foo value and a separate FooV2 value where the 'V' should be read as + * 'variant' or 'variation'. A V-less Database type is inherently the first variant/variation (i.e. V1). */ enum Database { @@ -137,6 +141,18 @@ enum Database { Property.MOBILE_COUNTRY_CODE, Property.MOBILE_NETWORK_CODE ) + ), + AsnV2( + Set.of( + Property.IP, + Property.ASN, + Property.ORGANIZATION_NAME, + Property.NETWORK, + Property.DOMAIN, + Property.COUNTRY_ISO_CODE, + Property.TYPE + ), + Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK) ); private final Set properties; @@ -211,7 +227,8 @@ enum Database { MOBILE_COUNTRY_CODE, MOBILE_NETWORK_CODE, CONNECTION_TYPE, - USER_TYPE; + USER_TYPE, + TYPE; /** * Parses a string representation of a property into an actual Property instance. Not all properties that exist are diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java index 990788978a0c..3379fdff0633 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpDataLookupFactories.java @@ -76,6 +76,7 @@ final class IpDataLookupFactories { return database; } + @Nullable static Function, IpDataLookup> getMaxmindLookup(final Database database) { return switch (database) { case City -> MaxmindIpDataLookups.City::new; @@ -86,6 +87,7 @@ final class IpDataLookupFactories { case Domain -> MaxmindIpDataLookups.Domain::new; case Enterprise -> MaxmindIpDataLookups.Enterprise::new; case Isp -> MaxmindIpDataLookups.Isp::new; + default -> null; }; } @@ -97,7 +99,6 @@ final class IpDataLookupFactories { final Function, IpDataLookup> 
factoryMethod = getMaxmindLookup(database); - // note: this can't presently be null, but keep this check -- it will be useful in the near future if (factoryMethod == null) { throw new IllegalArgumentException("Unsupported database type [" + databaseType + "] for file [" + databaseFile + "]"); } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java new file mode 100644 index 000000000000..ac7f56468f37 --- /dev/null +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java @@ -0,0 +1,235 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.ingest.geoip; + +import com.maxmind.db.DatabaseRecord; +import com.maxmind.db.MaxMindDbConstructor; +import com.maxmind.db.MaxMindDbParameter; +import com.maxmind.db.Reader; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; +import java.net.InetAddress; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +/** + * A collection of {@link IpDataLookup} implementations for IPinfo databases + */ +final class IpinfoIpDataLookups { + + private IpinfoIpDataLookups() { + // utility class + } + + private static final Logger logger = LogManager.getLogger(IpinfoIpDataLookups.class); + + /** + * Lax-ly parses a string that (ideally) looks like 'AS123' into a Long like 123L (or null, if such parsing isn't possible). 
+ * @param asn a potentially empty (or null) ASN string that is expected to contain 'AS' and then a parsable long + * @return the parsed asn + */ + static Long parseAsn(final String asn) { + if (asn == null || Strings.hasText(asn) == false) { + return null; + } else { + String stripped = asn.toUpperCase(Locale.ROOT).replaceAll("AS", "").trim(); + try { + return Long.parseLong(stripped); + } catch (NumberFormatException e) { + logger.trace("Unable to parse non-compliant ASN string [{}]", asn); + return null; + } + } + } + + public record AsnResult( + Long asn, + @Nullable String country, // not present in the free asn database + String domain, + String name, + @Nullable String type // not present in the free asn database + ) { + @SuppressWarnings("checkstyle:RedundantModifier") + @MaxMindDbConstructor + public AsnResult( + @MaxMindDbParameter(name = "asn") String asn, + @Nullable @MaxMindDbParameter(name = "country") String country, + @MaxMindDbParameter(name = "domain") String domain, + @MaxMindDbParameter(name = "name") String name, + @Nullable @MaxMindDbParameter(name = "type") String type + ) { + this(parseAsn(asn), country, domain, name, type); + } + } + + public record CountryResult( + @MaxMindDbParameter(name = "continent") String continent, + @MaxMindDbParameter(name = "continent_name") String continentName, + @MaxMindDbParameter(name = "country") String country, + @MaxMindDbParameter(name = "country_name") String countryName + ) { + @MaxMindDbConstructor + public CountryResult {} + } + + static class Asn extends AbstractBase { + Asn(Set properties) { + super(properties, AsnResult.class); + } + + @Override + protected Map transform(final Result result) { + AsnResult response = result.result; + Long asn = response.asn; + String organizationName = response.name; + String network = result.network; + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", result.ip); + case ASN -> { + 
if (asn != null) { + data.put("asn", asn); + } + } + case ORGANIZATION_NAME -> { + if (organizationName != null) { + data.put("organization_name", organizationName); + } + } + case NETWORK -> { + if (network != null) { + data.put("network", network); + } + } + case COUNTRY_ISO_CODE -> { + if (response.country != null) { + data.put("country_iso_code", response.country); + } + } + case DOMAIN -> { + if (response.domain != null) { + data.put("domain", response.domain); + } + } + case TYPE -> { + if (response.type != null) { + data.put("type", response.type); + } + } + } + } + return data; + } + } + + static class Country extends AbstractBase { + Country(Set properties) { + super(properties, CountryResult.class); + } + + @Override + protected Map transform(final Result result) { + CountryResult response = result.result; + + Map data = new HashMap<>(); + for (Database.Property property : this.properties) { + switch (property) { + case IP -> data.put("ip", result.ip); + case COUNTRY_ISO_CODE -> { + String countryIsoCode = response.country; + if (countryIsoCode != null) { + data.put("country_iso_code", countryIsoCode); + } + } + case COUNTRY_NAME -> { + String countryName = response.countryName; + if (countryName != null) { + data.put("country_name", countryName); + } + } + case CONTINENT_CODE -> { + String continentCode = response.continent; + if (continentCode != null) { + data.put("continent_code", continentCode); + } + } + case CONTINENT_NAME -> { + String continentName = response.continentName; + if (continentName != null) { + data.put("continent_name", continentName); + } + } + } + } + return data; + } + } + + /** + * Just a little record holder -- there's the data that we receive via the binding to our record objects from the Reader via the + * getRecord call, but then we also need to capture the passed-in ip address that came from the caller as well as the network for + * the returned DatabaseRecord from the Reader. 
+ */ + public record Result(T result, String ip, String network) {} + + /** + * The {@link IpinfoIpDataLookups.AbstractBase} is an abstract base implementation of {@link IpDataLookup} that + * provides common functionality for getting a {@link IpinfoIpDataLookups.Result} that wraps a record from a {@link IpDatabase}. + * + * @param the record type that will be wrapped and returned + */ + private abstract static class AbstractBase implements IpDataLookup { + + protected final Set properties; + protected final Class clazz; + + AbstractBase(final Set properties, final Class clazz) { + this.properties = Set.copyOf(properties); + this.clazz = clazz; + } + + @Override + public Set getProperties() { + return this.properties; + } + + @Override + public final Map getData(final IpDatabase ipDatabase, final String ipAddress) { + final Result response = ipDatabase.getResponse(ipAddress, this::lookup); + return (response == null || response.result == null) ? Map.of() : transform(response); + } + + @Nullable + private Result lookup(final Reader reader, final String ipAddress) throws IOException { + final InetAddress ip = InetAddresses.forString(ipAddress); + final DatabaseRecord record = reader.getRecord(ip, clazz); + final RESPONSE data = record.getData(); + return (data == null) ? 
null : new Result<>(data, NetworkAddress.format(ip), record.getNetwork().toString()); + } + + /** + * Extract the configured properties from the retrieved response + * @param response the non-null response that was retrieved + * @return a mapping of properties for the ip from the response + */ + protected abstract Map transform(Result response); + } +} diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 793754ec316b..46024cb6ad21 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -24,6 +24,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; @@ -64,8 +65,16 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(Sets.difference(Database.Asn.properties(), Database.Isp.properties()), is(empty())); assertThat(Sets.difference(Database.Asn.defaultProperties(), Database.Isp.defaultProperties()), is(empty())); - // the enterprise database is like everything joined together - for (Database type : Database.values()) { + // the enterprise database is like these other databases joined together + for (Database type : Set.of( + Database.City, + Database.Country, + Database.Asn, + Database.AnonymousIp, + Database.ConnectionType, + Database.Domain, + Database.Isp + )) { assertThat(Sets.difference(type.properties(), Database.Enterprise.properties()), is(empty())); } // but in terms of the default fields, it's like a drop-in replacement for the city database diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java new file mode 100644 index 000000000000..905eb027626a --- /dev/null +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -0,0 +1,223 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.ingest.geoip; + +import com.maxmind.db.DatabaseRecord; +import com.maxmind.db.Networks; +import com.maxmind.db.Reader; + +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.junit.After; +import org.junit.Before; + +import java.io.File; +import java.io.IOException; +import java.net.InetAddress; +import java.nio.file.Path; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; + +import static java.util.Map.entry; +import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase; +import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseAsn; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static 
org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; + +public class IpinfoIpDataLookupsTests extends ESTestCase { + + private ThreadPool threadPool; + private ResourceWatcherService resourceWatcherService; + + @Before + public void setup() { + threadPool = new TestThreadPool(ConfigDatabases.class.getSimpleName()); + Settings settings = Settings.builder().put("resource.reload.interval.high", TimeValue.timeValueMillis(100)).build(); + resourceWatcherService = new ResourceWatcherService(settings, threadPool); + } + + @After + public void cleanup() { + resourceWatcherService.close(); + threadPool.shutdownNow(); + } + + public void testDatabasePropertyInvariants() { + // the second ASN variant database is like a specialization of the ASN database + assertThat(Sets.difference(Database.Asn.properties(), Database.AsnV2.properties()), is(empty())); + assertThat(Database.Asn.defaultProperties(), equalTo(Database.AsnV2.defaultProperties())); + } + + public void testParseAsn() { + // expected case: "AS123" is 123 + assertThat(parseAsn("AS123"), equalTo(123L)); + // defensive cases: null and empty becomes null, this is not expected fwiw + assertThat(parseAsn(null), nullValue()); + assertThat(parseAsn(""), nullValue()); + // defensive cases: we strip whitespace and ignore case + assertThat(parseAsn(" as 456 "), equalTo(456L)); + // defensive cases: we ignore the absence of the 'AS' prefix + assertThat(parseAsn("123"), equalTo(123L)); + // bottom case: a non-parsable string is null + assertThat(parseAsn("anythingelse"), nullValue()); + } + + public void testAsn() throws IOException { + Path configDir = createTempDir(); + copyDatabase("ipinfo/ip_asn_sample.mmdb", configDir.resolve("ip_asn_sample.mmdb")); + copyDatabase("ipinfo/asn_sample.mmdb", configDir.resolve("asn_sample.mmdb")); + + GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload + ConfigDatabases configDatabases = new ConfigDatabases(configDir, 
cache); + configDatabases.initialize(resourceWatcherService); + + // this is the 'free' ASN database (sample) + { + DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_asn_sample.mmdb"); + IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values())); + Map data = lookup.getData(loader, "5.182.109.0"); + assertThat( + data, + equalTo( + Map.ofEntries( + entry("ip", "5.182.109.0"), + entry("organization_name", "M247 Europe SRL"), + entry("asn", 9009L), + entry("network", "5.182.109.0/24"), + entry("domain", "m247.com") + ) + ) + ); + } + + // this is the non-free or 'standard' ASN database (sample) + { + DatabaseReaderLazyLoader loader = configDatabases.getDatabase("asn_sample.mmdb"); + IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values())); + Map data = lookup.getData(loader, "23.53.116.0"); + assertThat( + data, + equalTo( + Map.ofEntries( + entry("ip", "23.53.116.0"), + entry("organization_name", "Akamai Technologies, Inc."), + entry("asn", 32787L), + entry("network", "23.53.116.0/24"), + entry("domain", "akamai.com"), + entry("type", "hosting"), + entry("country_iso_code", "US") + ) + ) + ); + } + } + + public void testAsnInvariants() { + Path configDir = createTempDir(); + copyDatabase("ipinfo/ip_asn_sample.mmdb", configDir.resolve("ip_asn_sample.mmdb")); + copyDatabase("ipinfo/asn_sample.mmdb", configDir.resolve("asn_sample.mmdb")); + + { + final Set expectedColumns = Set.of("network", "asn", "name", "domain"); + + Path databasePath = configDir.resolve("ip_asn_sample.mmdb"); + assertDatabaseInvariants(databasePath, (ip, row) -> { + assertThat(row.keySet(), equalTo(expectedColumns)); + String asn = (String) row.get("asn"); + assertThat(asn, startsWith("AS")); + assertThat(asn, equalTo(asn.trim())); + Long parsed = parseAsn(asn); + assertThat(parsed, notNullValue()); + assertThat(asn, equalTo("AS" + parsed)); // reverse it + }); + } + + { + final Set expectedColumns = Set.of("network", "asn", 
"name", "domain", "country", "type"); + + Path databasePath = configDir.resolve("asn_sample.mmdb"); + assertDatabaseInvariants(databasePath, (ip, row) -> { + assertThat(row.keySet(), equalTo(expectedColumns)); + String asn = (String) row.get("asn"); + assertThat(asn, startsWith("AS")); + assertThat(asn, equalTo(asn.trim())); + Long parsed = parseAsn(asn); + assertThat(parsed, notNullValue()); + assertThat(asn, equalTo("AS" + parsed)); // reverse it + }); + } + } + + public void testCountry() throws IOException { + Path configDir = createTempDir(); + copyDatabase("ipinfo/ip_country_sample.mmdb", configDir.resolve("ip_country_sample.mmdb")); + + GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload + ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache); + configDatabases.initialize(resourceWatcherService); + + // this is the 'free' Country database (sample) + { + DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_country_sample.mmdb"); + IpDataLookup lookup = new IpinfoIpDataLookups.Country(Set.of(Database.Property.values())); + Map data = lookup.getData(loader, "4.221.143.168"); + assertThat( + data, + equalTo( + Map.ofEntries( + entry("ip", "4.221.143.168"), + entry("country_name", "South Africa"), + entry("country_iso_code", "ZA"), + entry("continent_name", "Africa"), + entry("continent_code", "AF") + ) + ) + ); + } + } + + private static void assertDatabaseInvariants(final Path databasePath, final BiConsumer> rowConsumer) { + try (Reader reader = new Reader(pathToFile(databasePath))) { + Networks networks = reader.networks(Map.class); + while (networks.hasNext()) { + DatabaseRecord dbr = networks.next(); + InetAddress address = dbr.getNetwork().getNetworkAddress(); + @SuppressWarnings("unchecked") + Map result = reader.get(address, Map.class); + try { + rowConsumer.accept(address, result); + } catch (AssertionError e) { + fail(e, "Assert failed for address [%s]", 
NetworkAddress.format(address)); + } catch (Exception e) { + fail(e, "Exception handling address [%s]", NetworkAddress.format(address)); + } + } + } catch (Exception e) { + fail(e); + } + } + + @SuppressForbidden(reason = "Maxmind API requires java.io.File") + private static File pathToFile(Path databasePath) { + return databasePath.toFile(); + } +} diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java index 84ea5fd58435..3b1200363778 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java @@ -361,8 +361,14 @@ public class MaxMindSupportTests extends ESTestCase { private static final Set> KNOWN_UNSUPPORTED_RESPONSE_CLASSES = Set.of(IpRiskResponse.class); + private static final Set KNOWN_UNSUPPORTED_DATABASE_VARIANTS = Set.of(Database.AsnV2); + public void testMaxMindSupport() { for (Database databaseType : Database.values()) { + if (KNOWN_UNSUPPORTED_DATABASE_VARIANTS.contains(databaseType)) { + continue; + } + Class maxMindClass = TYPE_TO_MAX_MIND_CLASS.get(databaseType); Set supportedFields = TYPE_TO_SUPPORTED_FIELDS_MAP.get(databaseType); Set unsupportedFields = TYPE_TO_UNSUPPORTED_FIELDS_MAP.get(databaseType); diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/asn_sample.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..916a8252a5df1d5d2ea15dfb14061e55360d6cd0 GIT binary patch literal 25210 zcmbW71$Y}rw1s5~wGFi4w9Ph66UT{FF}P@s46+m1!Mdp|t!>4!5JLAF>d1s2Q5jVu;RAz1`-OZdg~OUM!ANOBZ8 znjAxJKyFB)Zdw0WavV9HoIq|&ZbD8ZCy|rMDdbdg8abVuL2gQJMs7~dBn@&4au$he zm*+d1+=`q-&LvC9d1M(mpIktelU8zT(ni`z2k9hTq?`1RUeZVU$qI5Ca$9mca(i+I 
za!0a~tRkz)8nTwGBkRe9WPofS8_6cJnGBLGWGlIdTug?@HnN>uLM|njk;};yYYw1Ke2PJ~CjeQ*T7#(KRd4np`F0|^{{tCIe`A|u=mi8-;?@YAzV{vpb?97-NW9!?%X9!VZW9*ugAQMPRFSn9`-$20ad@=m075_vLX z8zFWI%Bh-v8vN5W{|p{;CiJs3{cMY6RCYd{i+L~^@$*ogLpdMiZj=j9uH*3+k{3a{ znD!;)rHb-8E<^ls&A);{{-CBmMEzmtkI;Tpi$4baaq3TyPm)h* zc~2wn8S2mG_0jXFJx{)Xycbb^M!|VoK1O+&u~+iPSzd+yn&!U_{|(K56aHJ8|2F)0 zH2+=r?`i(~%C>v}{X>+Gls_8lOcdE?pJ@4?Qu~bj9LIb?`%CgGMR}~4*WYOVx6Jtt z`uDVd(BeNr|0(Z}Mju#y!Mq*8++X4UM*Da25Asj)FGbOQ0n#O56clQD5&UAtO7i-c za_A$ekIL%>qoI${{0*41At~A#OM9G_H=f!A#5bnB2|1CRq$tjFOh59bpe#h0n%4`a z!Jm$@fXB>$y{Q)8jQZx#XVNyb_!iJ-QQuP2XH(k>@j0~TYVlI&oT~+8u;;7T7_1jj z#5v3U75!$_{H@{JP_{#{GsmIDozPvH?`Dn%x>wVE@co)!!MtstZ>#JLvVF3>mbU}* zc0^t!?JBZb%d0`WR`ctaQ?Izeh4cf+YtZr<;WweQp)@lmNVaHst<)Bgi^-71vSGIG z+NmuemnzDBT}Ev=+F7CLD{*`eV;zXSg0eHpF(|80dKudVc9?djc6ok85{(8;d zjmPXx?t%E8%HC+QmbW+beVDVarte2>fARqGK=L5+U>tu4?L*1K6y^0CPVETtNb)H1 zXp3c|>^L2Zy6#0e4&^44<54a^If1z+k|&WTlcy-k_D)6qX_|jJbIyQ%rly}o{cQ3a z@?7MdNBew5QSU}~QOnNvi;#D5K3;GM{7W_eGUi`SUO{4B6kMg{T}}NO@>=pb@_OrmpOK#F#a|94f!qk9r-%&$SbR@3XK*F#@OJD|lIs5K(qr0fZ& zQwwT&E!0}cMdV^KgyY+2w=2qiSOR@1^<|pA9Qq2)UkQIF=5>%eYw=Zx@1pr(_}Tdr zVSbb}$u7kSTt|gH)MMmo)UlR!FByl{M?0Y?uOmsVAMssjr?fcco6O6Q1ISyGk8ivl z{<^$hILJJq??!ufExre}JrUna)Ay#n5A=O$@0X8n{0Z~}s2@lkL>`>aD?9{whoYRo z*kP~_r+ox@BzY8hG3#WD8^&(Qoc znRAxKvdKf5eh%`^W$ZlieDVUs?n1ec{zb5_L%CS_^4u-_+u7QG1(whkO_L?_Dft=lmm>djup0aVoCSW#_*dlDit^ZR5dW6?cgUYa`+Mj=z_)-uYWY7g{xkGn zH2qijzcKbZ`G*$&6Y;2Ihg>k)O0r9O@vPfj2=R+RahAb+CrCm+W6WO9m@H?vV`8&d|)ch)CPrZvV6UO>|u zs5g>Lh&OBTAp91_TFFJ^VlqUwk?rIX#VM!JUxvEwLs^b;4z(4qSE9t}?*zMp_Ri!g zau-F}FJa_$YJP+{QPM=bi*`5JqbQG$A-KSqX@iny9lIs*@{vhJ(sqd!gyIU+%FQmRFat@`v7wo;YynW#BtNHseZ-3|qX!?QF z4}yNMrXQm0X=2`By%wE_a<~>h0{)Sjf0VL|jwX*m{8%l19QEU&pODw5;T(%j%KJqp zGw&4Wr)qk3j-1Z;8Hk@r`z-Qo#c40nKUY!q=Xuo5$8i^+T#u6N$BVSQiy$rzE9Tsw#c!l`6Y_4ReG7Rjc^i4VqOA80YIl-% z;rP3?_&xCN&HK}Ftwr}U_kp}#^dPl|5Pz8VBjlsxW8~wCvi&EhJxM-AK8<>w(ej?9 z{v7!{;xA}%jMa>6zr4(xSIAe%*T~n&H^?{f9yf{hTd4PKly6bqq4qBM9{E1`0r?^M 
z5&1Fs3Hd4cnWDVzTZcD~f~ui$^p*f)872IgJScR0^t@cX=8^aK1KHUB3b`!n=k zH2qijzcKbZ`3L!@miL#k(Tt_IfOcU%UR-3cY&t{pOPDi)97&EMN28uGv^P)`*I&FL z3MlQdnm!Ktc+H;xe`C$xgvU(8d2kIEPex7?$`q6el&L85m^TgfbnSSad-0}>Z-)5h zv}bC02J|hc&my-ZXOo<7#dGM-Rg~9MYO!plw_Aq#=4a){=E(J-Lt!kPT#` zqNs0k%%kFFlrUpKvISaeKCgHY{Kbrg$TqT_T%st?b1CwdQD083AXkz*ksajD%BcUIa_h({kijT?r#mDlPI~n>ZTHdMD zPa{uPocSXCGm(FmvgP@l4gVa*&einusGX1a1+*_DFCs5il=WPKyi2KHMqW-{L0(B- zrD)8je+_vpc^!E@>U;*}2Ib3kZiIgm<2S>8fc7o0Z>4=3c{_QBqCEaiYIh<3Zrb;d z_iB0fA%4H|4fdft{z2wGL_UnXN3`P}h5s01kCRW3Pm)h5%KDzRShm=d`m^M7sN)@! z=e0bn1DT&4$Cv28OumBrSGD}tsJ{;V4cc##Z)tgND|-u!P4T;ozem0g?JJZIwEPd@ zf5iC5YtNekY6g!nnC|-@*72Yec$5v?==5==KMhZi1<&mf7bGTf&MG? z-xOzYofZE<{ZH~Q#S#lyfPJRdVks#^!LcPpD43Ha#VBnkNGzFyGJ^Rd$x+az&>l^W zAvYj5M9xOE$CBg7@rvTQOD0g;81^QbK9Tw)aEvU~TwM93Sm6*z=Vw+gkv?oG~l8wHCJ_Zl~_h zbSE_z=_Wm-S8+?swGuzI3fT21+tA;Z+>YFy+(A*E-;UHOaa^NVGx?e=Oj`8bZKY(%r{TmhKxZXtVX7U!~-Aem5@^(d;e+S}sQooD5 zTZ`XA?Ox>FNBe%oxgTnI4>I-;@*dXoN2osv{W02)Yw;(bKS}*5@@euJE$>-s&msSL zO@BezrC4hvFQI(Gyq96WLi<(nHS%>udHru7?@j7&k#CdlknfW3k?$+Y<3GUhdOv&w z|6>&|{f7CUlAmeEeU7{@H2+KHdan@hbE`aDf9gFm0K1!OsCCATJRq+L;-rvt}3HQ%M|GOX_r9_D*V zAL&<=$5&9>hTN9i4##h=e<3BX6Y^--+=Ka%aR>(cXm& zlbwq4JR-=8Qa8yiE#8fI5A~R)uU7W_PY{nY-Us_0lmz`G?0snW!`@ZdvfUK?wB~2v z4={HPxmJs>qc(`V^|W^*cPIByl=bgPZ7<}buY@Mb0`$pQ`&}H5N{D(FF5%`a4{$o7maqAAK?6dF5L% z7p3+h`4agu`3m_e`I@3U=5=askZ+=1%q~By$P`*6oL--$Q z{>RJ}>-|$r|BU+Q(7&Mlr567R`qz1X>v8bEW!`t>_xbpU9}xdh^M8W>GxL5Sf7Rl@ zA^yAO|Do)y_l5o!bqiUL&l_0?yNG@9z#ggjqhODwKSt9xfW4vew|+qp&1wIA zSRGC$>MKJ|hu7z=k0b{Ynbcrad#Em&>TmJwL$@>~5lEs-y+ z+9jaGqpYR=A4*`U$7^*stgD1dtg${?;>r@qD z^wx=vGs>1$C)XL($-cgUL@W}{#FB}$93vyJ%;Uq*s8gi*&l+7GpWDG2V|@`-V{4$f zy1cpvwRI;K7)?#pxLB{t>acsQeq3$Nl!LIab0Cd=O{djJxLgiDDy*un!bl_r!ik8f zD($cY+be_BwMKPIb8~wzP+i#;XbFalra*I`t+oau?RQu)!gj9{ZDehyQZf$7_1Z!K2P`}k!ZzQcrD@N5X<|c~GF6zuXs!}nk z=vBAV=k};R2qadQ$JVIWal0|KPE6<2 zfKj%xr9Xo{j2qSA&bVm=)sjhz35RoA>UUdR9;@FgYR#H|KQEuhAtrUG9@n}i8I73< zc4hGr>0y{NnAw<3p=5kOjeKjyR5QB6BCgtQvsoST>hm5Jl-{SfHjhs%%;x3B)K*NQ 
zHD)Ruj*F(OeS=s+o3v*&OtVD7jHs6MUBM`Ao@a!j39C`nBQ9>G-R87<{1{BpX`1to zmYpu24J}uq+gfAdV!BhYsBAfm4(zbF>kVuNOIsE<8ZE80i#cUl+m;(mZJ08$8N1DA z#dJ~4NQc{loHSzJ@!G|dX=pX3)+aI<_F+M=t?i%7raGDGPp0G=!noV97@cbM(8SsQ zW8B?7zaMok!M;~>k*Sn{%Ob9R0N(X~#eP0r9- zj30LMSQwM0Kb#m;ecak6B+u@m%SU+-1-DXa|Ui6%W*1H6lr=Nt@$z6xA6 zo7;h*6q|KJEYXz;$Hi3`WlaI)a3e%*=n}CliET=A^0lE~d^Rr!w7D-9NhQR6idbK#ob3#OB;1+XDRL|F27u=ZHsYBNe}d!sieHCsT~V9yS!us;$|k6 zj`bM}W9$3OOx#Sxj7n=ru1+szq0?=}rHICqgSPX7fNSu(-C_t@8;z-r{h5fmr}p<^ zVivZBd&REP6zhv+%&5_fbqintCymy&>L&EL1MA<87Tn^7p*eqR0sD?v{Q>Od%@H#m z7duWQX^mk!?-*IF1fwpt4s|z)TQ??B{w8NMSzC?33Wp6hO(!OkY)9Fi?AVIwge!5| zY~0^l#3mDu?K)s4I3A0{9!g~aE+K;cS?<7=;==`q?#bJK8)28viQW;<13~#LBW}KY zjOi$l&oPF$$+a{axREBpc8}kv3P*Z7F?&V#IB;jT^Q>v-2b-tu!i`Baf!-0@yx14y zgHp#>S*cN*UEfAH5j7T@X)~OP^knZeMm4U|Oo{6eI~i_);&POkYiz5p)J@Rg@`)*1 z6%>P!NUlky&D0v+x`RRZA>kWU)+Okmj)@MN&0gX6L@O%LK{TCRU0oF@|N0N_;bJsg z9&xJ-V9%&CV<~IQ#O6~QOQHL6Gb-CcXQwmUf-r|_A-jFJ>^_$m@wdgbn3*oz(vzuf z`9QcbDE(|7rHz*Uh|#(RlUY0lc1)936YX+Txa}@mw(Xq%e_S00&4JBabYly)5lk#I zkxW?=*ha)&C>$e*Gck?I5E`2y8$)+^E3p0KT9bnJKl)L2FglaX`E5?onRVC?7A4F9 zYzGN*9UcL(j*|V@yn>pRZ?V(q`Fo2(7^+3_2!`Rqtq@!O?2|C@aee7bN^ZqgaldZC zH3~r+#>w&$eQrz<&J<}6XCO<9D^)wC#X+O2w0KqS{CyseyTYgSm=rpP*JDnnLu@)V zn0|7Ada-Jwn1Z1ib0CwB^q9Dd#7(I3CMajRv%+rgQs=MCVV!uOBzU*uy#b|D!suOD+jnov{DE)$xk`QBQ78 z$vTBFoJl1fxBNc8OYB(0J$nWrcZ96IeR!g zDqjEGKCy?2^Qm7QOY#&fi^a36c-n5j2Ln6Ck2xh2hLJ@r32G z@dm#T?`b~I-|p?r$<7$w8vpjplQ!y8$$@@|Ry>Z%*GM_z{S`iU#FbnB;>4sdto1+o zaP^Q;I{FmM_|fNc6`_i7r#`1B{MQKoZKL|{IwPH(f9o%w5QfuVrTvf2c$3tA8KN8FE^GC;M^>o#0W>U$xInak!uj(EX5Axz&UVlZDTSK@a z;`Pc0A(xHzP$$5z8Fsw=+Hf-xQ?DVKz=xq&M18d?5O414f1_Ny%wQ@StplC$Sj4Eo zM-#Qtg)7X6J*sslg`v8WAI%(2zg@JB%P%ds8PD9j?x49YW7K0wX5WOxQzRCsQC+(j zZ{rTO9;t}fO;ek66o!~|@`1Bfe9Gb5Rv-i)YK!D9JSPls z;qs9U)6VJl+C6+bY>&peW0`PV?0xC}aKucj?I%>*AB(j28w->1D88S{4|=lOMPpqN zfBx|&Ck)kkcF?ie<2zWg=>g3It_P;6`2?}1oc zqJgL|MA=@>+AkaC*9Qd^H;_9lr=S zs_~ujYGfUq?#|r9Pfi%ljS9o(@VI>rF?{lRqU!6XSP@onJ@`D(h2KW-nJk41S!`_* 
zmonMm!!mS5(1&;+%-KUtOs)cl$LW$Ei>uIqt$pH4O7_vm+KDEYH#cU#dA8Ly)rwCD z_}E!#G~?G0tU@2wgxz8HA>#7aBl7?E^S5&Iu&W^R) z;m+S{hsAxP=1YAr)>(Y^-V2Rl}*(x!C-B5TlQ;27z@;H+BzG}nzb5Vb~>}S)q<)N?k4G;>=ep>!Al#h;-@7nC((|>=8-RlIeR#Li%WES z?D!Bj>k;ho_?VjLHs#MjJGDsLXidh`z1V~BQxQ~A>BIShP zOe8eybl{sF-kI7kq}&F@tvvf_x;?uS2Be7Hqd&V5ik9$AH2;d46Na@BPHJGYrIVIn zBb*e^U5~?O&p)xtN`)|->%pDGg`Zi)J{ZCcvmxCtf2OpiamVbi)DOhF#9gd^K<X%97d_%vj9tB>5``&nJOD`oA%2HY_&l*BJB_#v$hlPD67rA?z| zz`&buEGY)Mzejw;EjL0q3Z11UlcU1tvgdDY;zXn{tP!0u7X2>Hp>)b-7||(nv`W2r zsp0)nRSIF4y^7yi8|1F8Qe| zEbgwt9?mI<$1lIjC4K=DXRr+OHkw&$rh3ito^W5E8SfmxS7-5YaRu&U3>gb^q3kJ$ z&B%p!IiJ3GDGaCc(G_^AcX{wsHfJfuI+?KJRW)wKye(^MNf>s#(!}EiejLKDn;H2# zoa}jgF?8A7;)WK+W2m%;dD(7!!j#WUm|D&Ksc^TMDvxo;54B1ky92&ONAM=8`dM70 z7jI6H{HN!fFw`R9YW3juE56-|-zXOKBvY~VNxYh>2haq5G-_L@wCt@W`)h@0I$Yt6 zM6f1vONqizONmY6_kEZ6ja)QcA9F_4D^p|4StBj{EW#xl7ES2mNMWdP6z!BwuR{}+ z=+n~a?brvVlepZ+Jd9|#V^+Cc9>3y+m^U- zm{|XQ?b?(-)TN0V2iBik{c2lPiS^gol`^v*68T3qt#y#(^COwGgte5H`PjXPNsj1{8Z)B=@v69^Y*uMpYaB%qcE^{N+#L ztMM%nzyD_Mo1qvssZ5M#g9jlKFWI^2Wv4Kl-oO}m@ROJLlb=vEHUs=lD_>{$M>Gp# zviIe0YuM~jGk$24zv<=dVQxA2%Ht8O$Q6jcC)m|0mVBTUmbT8(fU655n_K5uVK@_4 zJOugFyP23zfmnYWzfaX zRSJKodB%rCxB5FCydX_&!nQAdxWiuytj7~-5YO7;7dt$S@vHb`b^Z>AU)&Ks&Bn|vU=BjkKuRm^D`}(4t zjZrflNyYG&W68ujRc7Kp(i&p@*_O^35s2fxX8i;29bph$KWOKi|7v~hmb?b zVdQ4yaB>7$OpYW+k)z2mx=9b|C4Hoy43I%GM25*SavO46ayxQ+atCrp zGD2bw$azpsR*;n>=7h|zBCE+7vX-nPqhvkVKsJ(1wo zYsr5-iD=sIpT;TJ%`kebS097w2`qLusfA4`=X0_0{S{lUr)Uo zdXJ_jsi&a#(%x0ePgBbvKdb3|)cc`t$m>JVXO01+!;m&HW;b$oau0G(McMAX5WhF| zeKdVvYWtDM3Hr&@Pa#hwPt)Q~hy5+}Gd2Azls}vLIpn!o{&~nhpE~YE-V^T2aUr#f zU|+21m%zVN^Dl#cxx+C`l(|At)FJ(=pxw)Qt|qS`uO+WT%=NTyAa5jZQj~piGvaQc zek*yKmVZ0)@1TAsc^7#%d5_{SdwlMren0tuqP*UN)E*)qCLh6d9;N*l`8fH6qAdR; z;+~@ZH2UKM+Rs3LHt!4F@jTK;NH1vRUu67C&|jwg3i&Genxeek>xg?p^WS95ThQOu z^mnMgOTLHv_i3|@vc3-;j?MhkKSs>&NS`QQ=6?$RGtK{;v0sp1BL6GeUz6V`%JTMH z{SI;8GyezjM=kCrYCj|H7o^{iens23#szI#*82zY|J2I=B_I2ag3(GVD1bjGpI=Z& zy$JeXO&>yiC^?MW3~|G0k06T`#q|qDQX8eX8OE<*47IW3IC8wAEH{DLM8r>`Jz2}& 
z9QqXMQ^{#sehKoY=l$VZz@LeP{wOez&P1Ap)P^)0Nwjqil8<%GCFhaz$pz#>#G+2o zR@vSy;crEI5m`z)$*mP-ybJMe>K;w^Ivm53)H&Z|e1KYz4556OcA27#+lJb<6-h+C%l%URC~=qok775*ybttNNU^4Cz?8F6bheHZv<-XDQJDTpH-fYgrE zhh*jR3p(I;YJL~|gyye) zn5G{N{Rkv+Z%1nSQSgsu{4wOQ7n}_J6r?klcPe=rdAedT zVnmW{KMV1X(LNjYIY_tDKNt3SNLSE5ANB>xmSr!bei8JGXuMZ%lji;jJX5)oq4_BF6wua_mKA@ z?mpW0E6REwfc~K7KLr0_#yz6xk2)M9F($IiQQxEQqp#HbC5)>=Ts7?) zEx#6eo#sa!jxm@^vL73ew*jdUDNd~k_EOr-TDchX7V68$X!AlL&w_5`5o{(HNT5-3FzxIeLehc=Jk+CGNr}! zB5qgeq77Nv8O1R#LhGZ}pVwsDHc}rz%ptTlk-L$*lY1a$Pi4!pdr{vT`aZPx)$;eF zwm*3Q;tr&JkQR5a!!dRk^+PrNFlvXBN03L7N0CRP-eYJVt0>!d9JS+7zkN?9Ivj;> zBK{<#zmQI5%qg(HL^>7eW~9@opH7|u?J}e@>7PZOtthW|4z+X1^ALYN?F+~Y6=nQI z)GkKcB}kVle=M#ql8BLUmm}{A+E;4&RnV`devPJI3;jCHzn(ESK);dpO^RdxpnnVU z-axt)=>=-H!M>gL9k3s!eJAX@lr7tLH~f2;cQ1J#dA}C-0JR4Z|B$lB)ob~WF#l2V zG4gT5KcU4vN&PAEY4RDwJxlvJ#c^VspLaL}bNwmS!%Ld~GRwR|zKZ^AYrqX@5d~N`9s&`}cEdUpO4&xqhVn74@&l zZ&2U2w7(<2Cx0M+B!41*CUI}F&R-R!{Tu50UGwd^@~6_q^ZpARSW^W^Gm!?Vyh4bg zj>00Ou}FjS^%M?a-caaV_l28j`NNq%0{O+7J`(;Y%^wYajPfVAHGLfP@yaipKu#nl zk(0^I$tjAm%~Mf+8ub!#x|Tmf*%SH^XCNJgG)v2$4Sx>OzDRSCwn3VQk zZIMUCmWF8 zn9ra1IQ*q~zpxp8O!HeAx*Lk z@o{C#_P0~F$PUFxh%fA-mVmtxX&q88we_&Ol`ZS(fuCevDz8tvj@qtd8gZF?eqok+ zA9T!FQJ1W5gTpbIYfbtC$lt`g-L$yfsqI1Ti8zjxEVnoOeN^6L_Iu%ejNP9+fIN^q zh&)(P)^iBzIh6Wg?@vA%{?SMeARU8r6U!V6`#7XC=pPUJgnYTe6XBnv z`KU);{}l36@-*^v#mPUR+?hz1Qa=m!*~*sX&w+og=AQ@ue9gZA{)L)<5&Vla{}N?y zZm*fklwWu`c?EeTc@=pzc@65jmbN`7uBU&4qHO1l4#(#9J>RV9w=nir@;35z@(%J& z@-FgjT=yQ@_mcOK_bX1BK>tBS*^Y;(J&f{?X!@hnA0x&3euDOsuwSJ86!|o?XOu1L zeU|!j(4VLMLOy>A)>Pq3)L$lF$>$foO6@hoy{_qRI2=>#__rAIHh$v%5OME-TZ8X{ zxYxq>kiKWk`>;Qtjabn}+0Kuke@y!m@>B9N@^eKQ{{^)#$*;(-QO`G8+_%)fb2z3B zq5T8eHJJ8~h{s%(b^fdpTT zc5~=cXiwGhr$H~#{OOFDLCz!%#Ld#;W>cR-&L!ue4>)#33$*x!D8B_`w$${kpf93c zs_9OLV_Fq;7h;Y#3ktNiIP=+b{F0;ny&)maHSA$XiOgo@^i+k=KO2;2IY!YGz(c zQEDwJrf69{R$g}n;#aCT>9@jP#qz5)eJ5&b$ej_lmi8{N+i9C*8yQzD!F(yQs9_Dr zYjx)Hq@O^{{z&VXzaI7g?QXJ%Od>C(Y+1IK`mSV}%pfkS#r47O*Zd8P+o)KAdn($b zGwpPu2X>;GeGfXDEBRSX*Zy 
z-KPAavtgfubQRLMTKswN&)5757=Iyo5%Moax{Qo2cE4xLau7n$MqZxAS)9-GR6}^Z7-0!M|Jc?}2}>=HCbZ zewKYe(;uYv5cx3qh~o5@Q0_6L7pXrE`w3;sc0Wn|De`IZ8N@wH`#D7!|2*^;^8Soj z@No^1C-Yy1{|fC_$=9^}*QvchzKQs^Xuqw+y#xJS>hF>7I~+5xR*ODR6xWmfNBIA7 zKIk8VCEzEF{}lG`v_FIWxw2(Dzo7ml`4#fN*7Coh{w?_(`91jq;(tW?mHtn#e@1)6 zJpV;ewomxKsTh$bul)!4C)zs#`F|;YutQPA3@)HHh%AIwq~#-c@DSz=C5LJGn<0O= z@@Me+vfg6GjwDARZnRcz4E3?(IC4BW0r3-&rqG`Rd$PkZa~t}bD+*tv!Bdf@5QF0&Lj;*8H0HxIUD8YAZ*K=MN513zKD}=!

Z>K2RvpuyP$Q=T2SY!E`k}B7Q?_vn?gL5Ibp&~&qSTI}b~Jemc`SJx>N=kG3FL|7Ns6+b zlM#1{=AX)#)5z1wGZ1&C7Izl)v!S1(>E}{E4?5?Ur+r8O}~-aO~}8QHv3uDdn@$YsNYWBLEfo2 z3v*J$%J{nxe-G_@$@|Fr$p^>>$%n{?6=gk-pq@uH|1tQFGwuoUN%AT3Y4REJS@JpZ zdGZDFMa5Yk(SMnIg?yEK4f_)2_~6&+zd^nU-3R?G&;hkSnME*?vLjI~ayB1}C zL;9WiALO6pU->w3tsw=xYX9`yN|z5xD0=50Z4Np3|hQk3V?!C%U_W=)SlZ_)f^@Ru`gg{H5B-b#HHxtiRGT%$Obl=rxndApD% z*+#}uZ#!*EQI_j~-l_Rr@DrN9j%C(E@21^DCbhT}wO(>pGL3Q>+F7!X>?b#n8_5B3 zlcH?rZm4H>>U(JVp30v4DDwAa{yyZss@s?OMcLNBep${|0I| zk~bmlX4u%`xP`{VFkG!9JfP7GKo;`OTrv3fe#ylRuC@l0T6@lfRI^lE0C^lYfwZl7A@YeF*%YP1&*dYh8(LnpX+Goc=$f%O#mm-o08j=lH7`1M3$0Ha%<8>x4#6 zqo67h^LxWSuTfmzYYud$;)(2lsL?1{)@1uec`DtTN}Jh4DrvY^gq-fM)8liaww$Rd ztB%CHVZYDgF^cO_=K5}PLsHe%+8C*eMpiT!wTdTqA`rO_?*cCL28@u|<4OSxFo$6~>b=Frei>#=uL0yf@Dq@CvwcF=(dxK7QK-{P8 z1pUt3je5{?Zlk!l$4qAv$xMHuyW4X1o6f$BQL?feH*TaljCw6|k+D2txLZA5RP1pE ze5f~XYt_2le%x`q3q`H8YTfd3<)QC-dis)yxa_?Qn(6VO^IT51FC?m?6LzzivI|2l zHwL6J*}0KJm{FBR8>&_s4QfCS+~pOQf;j-?@A<1qs8jB z#Eg`cxV=879AH#JJ6DM}9CEt@MsZbgAnxo;^&2G>ODY;#MM2C0AEt-ssJxvYLAM_( zr?{abkhWUCT(U+wZhNkYzIVBC zJ5^?u{T_?7%x^JbR=PhCw=#A;tpSXn7uATVMcZ%Jf~nvR2f_iYrsiZO(Ut0MPb51t z-B~VxW9xQ^|uD;GLi}PxAwRG&M zWyA(DS*r)Fl(S)V5DPRYW`t;^?Rat1_C)gs{4S$7+L2Brvk7|ztQoO{{y!#}SY?qO zD~&Z__ruDd&xsW#)&;Hr+v^m)Ywu7nf?^Mlt3RH?Y{TEa^+ri+x!4cNQ{^dKBbl_~ z*?cc74+fng`dHd3IyZ_cYZ^(!BzkVRuo*w8{b2nC}Jfp%)V!v5$M0&ADwTAr85Ehc` zN!q#bMzefApSaOgnJkKwCX(%anQS_NValYs`^5H(ooY=%tR>Q1hxS!0i?uY@RK|>0 zV{}6kc|CsIN>wM8nq0Ljs#;q9Ipw%) z!)Oe)aXbh_UA7}OI`J%&doMZ$TU&K+cf3!H z{sp{F{6%d!J71Y6ga;COd4tuK*|pD<`{X9mC|SM2YBOR&$xYjZP1yk~;VhQ#YPSnZ z0e6juHXdYoCs(oC>kW%}(iD$p+R`brz0FK=Q)sElbeT!9k}+kBrhYVB)VaptcA-Xh z04rHO9yKpF8Xi|zj7F`w-o)ub-F<6BI@tH{XiLh6nAvSCb}q;Gib}Dmx-b@4yrQYL zXU`Kk)jWO=mRe(6Z00?wwgeu!VmmKcGoji_L{rJm+Ju?xH0ov6-v>^3y=HPCzp7is z(i7W+xSlYvd*)V^Rl`R6FTl(pT>1Y)uqywX*Afa$ME%A=?owEEon2E!SN=` z^UZ2E&JJR`!+s&IK}VY;ZXcdw*xhG;V3f@MRty*(EbfrYiD$4Vp&T?aKVUwWCyZ99 zPQZc_4>V^yiN=j9udZo`v{Xhb%Ny$r*%g^Sqc@%EPbA|w$MtRMO7$83Dg$#{oaSV= 
zV_o7Ij0G+)nDewpv=>tk^SCD7(}v!b{ZL+BzgU&V^zvg*56eLlCkS{R@f^x+xbBcY zj2(TGmFzYL>}45UMOQopu|v3Vd2|m7*nsMOb2 zwpKP9_Kl+xQG*9_DXb2HbnN=XsUzSEqq@4zRJ+;U&QoerRjR`F?B|s@tcl~5JYm_6 z-`+9ABngN80rbwwOv381l9|#j%k0i#ep;9oT=g-PWmIcfMx;u1pr{{Pl6qXp3rJ6N zq}WHWCwb(dwJw7yF%R3F>v}QNfo3aXndx|!(U|TulZj1g#^G^Tlk7;Pdz6c%oPPl4 z=4?$elTBp%vX*#y=ECFRK#76I>3-4wqAD@v)=Vgug4k8$k^H`k?z_Rt7`1)LUHhy=+B7Vq zsxN`Z^?m0-#zD(eR}@b_G1b)-ZBJV-0UTO!!N#J+9FHr`0DoJFEIS|x@=wuw%DO3Sl#Q)hRo&BRd4)0Q)t zGD@1NqK%6q(elQIhRX65Lk#k2*%)*b9yqK`IND+kxa2lnyCETZ%yQzXTGFsWIY!Lc z?2L*7gs298J$Nrv)kw#lFJh0uZYvHlP4TYIQgNni$FoFrePelbRcU=?b4^7}q`_Xf zDDQS*s(9r?Q1kM0+U0ZmaGYygpX$M)%zE7Y)_OS;S7O=-#aOu#Z=F~teR#pa)75UM z+wE1SB{`zPvF}$j)a7#pP;E`z+9c;BPMh`Rm8%*mTa1h|Eze7Mo{ARXxg#DX>V}o4 z?Ok{*`9oN9HQ4nsR@|AfjFOs&w2g?`-(@A3uer{bk0f}1yGbu*#`2`KF{?&2!&ej4 zd94kVD_aaTutu!1d3jAa7I4(rl)G8%47q9N7LGRW+#auZnX9+@GV8Ii;(UoEi1Dqo zeT;BK9UwwzXArli)|Tz*Pjrtf=nY~uEbS0)V5RuDfiDrMv@?l)EGFN|j7Dl)7MM18 zdSC!TYWC5wcL;3HVYfVzq|!LYi3bzL+L;z->lqbk9J%{b299aWz)rE=j3?Gv*tNy6 zkFwF8N*Rl-RA<_3FI{C}Ly>!hxH4MrmM;sk9rW^jA9MxX*eqi5ZAR24_9v0V9@Q-0 zXmanoIM3q?gqRrEi19`)_8~Db=;<#>7;Z7aqVaAsEjMDPDfYZEEmph>S23dX_)3B) zEO)**?gU$$Jk^=;Y@*-H=uC)%&#;P<4ce{)2|lqMt*U$6m@Ej zYK(Z@^82uJ)O7dZ$ z(|x_EE~{U@a>zz5EXF|#O~E&wK+q|l^f^cCaDT|}K?Ms7?l(%7G?>XOZb=mLIK|tq zJilvB?z%X%h_i8hW}v4nk#eg24==DW>D$L5@lX_{v74&hDd*TNKxYK~V#Jqbdpq@o zEZ(GJElpMVWo&<%L5&z`S3sRjZ6`Nkm_%-$$BTQaOC<5s&f>F#*g30elxMhi65oo% zK};T#ZAWYk>h>zpCh>h|VNnF@vq;QakIV1GXHa!uqT}(S7C8D$jZZI@*<&p36Q}r0##k2P%oc4y zJ85G_}p{A{NBL_7#EqrBo>RaPTB>~y)o4Z5*ftF7)t#&mWh(27c7 z8jS<%@L@|FtS}SgI4e_sqTm!Q4&%}M%bPfmudI!g8?!5$OIy^V3+G7rVG^hF)v`Wu z(i2Y(%~7opy&m+V#=1l=UTQP!>#9U=weaK(iBk%?QS2L{270+c77GM-geenkH?4N7 zyN~Z{%`IIPzO^aIs7j~$dQsm*Y&-5Ud|NIHn`LEUJIi?;_*3>T|ES+Yyk1WT7nvQx z8~zehG+7q)`@>~;jMz0P*Zj9yq^7FEr$!IXYYm-g%Sv{1rxN^9(^%D9S=nGLi8j_$ zs1L1crpT)Vg7Gp>M<8xrCg=Z4YwhcxYr`QI&Ro@a6G7j}PYa{e_w#@Iu##aHmE-$7 z&#-GIVViT8;dRy}uAH;~>#Fvpak#~Y_n-@Bj~KpN#@A=s6L@>W7Z$#v*W!C+d0kBW 
z+=HGRkD3GMMP5Xi-hUqp`x3aj03HOm0BTKaN+sL4Ce^ZNTCUXmQgyGH<_Wl5W$28K zG7~>2h&$2*+~R-TmKro)z~{vd*V2YV`cDdH6AxgTIz*qRLOTQ`vHZkOyO19 z&9e5mb{Xma>-4s}R4gQX?Zd=u%%<>DjoR$g?ZsO1icw>KqnBOkE9>x~H$B*$RF~QU zZs~t?sV5v354MF9R$_;qfZa#0(D%;@egC^cd|MSSLNR<(X-lPXtdify?EN4HHH}lP z8~b*|-nb{Bzx-uBQ_a4d{a@o@cNXTKH{iwuuf~qUmzmlMn1Ab=U|EN!&0S{U$2-+G zy6|6@z;s(M$tYeh((TxHryB8+i364RDY6;O#OKW1tA+fn zq`smuii39UgB!X(~5Gx*xme#ZBSMzu4K9TAO;~cntR9hY))qHZ=FOSxNq8 zYi+`H#-naO)_usMu0#8Ou6K7x`~tY57heE-#rJ;v#rJOc@ksVwQ>>*j5^bqA@WQ^L zu^HQ$x~I0XaL~RdX=Bjw=Rg0!c#C_&9xWaUQSsJYn#CKM{K60wzYmJ{HT63VM#yxv zWB>4Z%dj5g6YTH8e_cv#V*U_D7*AU>BQ5p$XLY@a^BTSa;zLQwh~OtHd>W5s`{MXM z&wE1a+tqw9v31*Y{po?V>XYW$9&=qPy;gp3PbJS8 z^v~QxGM{^PU0YwGyM3+In~HZ84eA{9@aXz>Gi$b)8Eb8Jpx0_m^x_>dWh8pn;y**I z&6qvC_%)%Yr@gJN-O9w%iC*y|**lA^$GwOecB ZDf}pQ@6g3{X}p@J+t+3io2+w;{{xjsLht|p literal 0 HcmV?d00001 diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_country_sample.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..88428315ee8d6d164a89a40a9e8fff42338e3e2f GIT binary patch literal 32292 zcmbW62Yi%8_qLY+fe?D{EWIb$&896NB-GH0NEcHU2qnP~s@TN>_J(2u3#eePSWvNm z3id7*R1{Feh6USqKQq^62MqfC-}m~Yy;yZ*F0d=?2D`%^uqRA{$uI>5U=XIl5KM#VFau`7EEtBpU~kw59tZov zelQ#6!2WOm90&)&!Egx7g+t*mI2?|EBjG4G8jgWu;W(%>Y0l+%cmkXNC&Ec^GMoaZ zLY-w(=R`Oio&@uB^xxEZ&sW>4R-ks7S|Ra>>{GF4$exKk%hJ>?LYuAl97msvJ`WZv zUgE^(qn|9j6ng|$cCOS?2?o4H&r?FsU(fLBVpiaebM z-_>fX)z%ZgM)tK%d=2__(yzz9!HM4}{U*m>OU^p!H)G!dZ*}r+vv#fK#5Ta&;T`Z! 
zxDjqr-Mg%9)_6DiJ@8(5pYrZ^@*Y5cQ2IlT{;>Q87JMqVSzFN8`zQ--im`}>r zej9s>?5D7whR-uBHcJe+!{}k?qpDFKi>@VOR_$Bozc1|zbke(C*B>c2kfc5B((s3GEA|oJ=5p&TBV>`YD}vX zlCQO!xu(Z-<7Zmm*jdU8JANp@z;{GPWsJ`evABDiQVSt>!ojS{M*U7L;9WA8{sA=?=Hpf zM!yH%3-5#XTYB@XqqA7)LH#a$SbRu~&eQWBk>65$RPC_ZX0?54k5T7w+3%}8f&V1j zB5eouQ?j4Ne#X-5&sOQrqCW?pcjCHJm9{zlc5+^n{*t4=Z0$PQ>qyG~hId8(Z6n`809r&)LneThrE4Tj#%Ky-b@1oB~(m!_ePvn2<_`BtQM(*eE3%Cb< z3BOYAUTfE_>+^Zdn~Ryxe#O4V{sta^-@=3NJ4@5=d$b?a|3~bf;2|gPXT^U({}uig zi`VUk_Pg@_!2T2drE^mo`)}DkYkT9zRkC)R#JI|iUPXRYV)2e%4Xrv%fHjn#=;YOu zU(5P+OC7zAboMQ-o}<^7-oW~C4PhhL*vV_6yr$^Q9KE^p7S^x3o_H&16VzI(C9AbT z_rtc9W*^!q-rn&$$nWU*o#@jUc2T^m6YqxJU3w2k?X(HbwlwSLCB3&=Ut)dWahCP;{NnndWjk7q&*#NVjR9~V9As%?gVBbl zPA>LPCq4{qI2@t8k!oYqMrjOhexohDdd441?l?=MMPuWYe?m+*bC@WtSZxyhCc`O~ zCTFVR(;WXq`O_W$B>8!cpKtBB0_la09+5vot%&$cILor0=Ha!t*=loQTHIXu^J2c4 zQ;D=Q=s#cf$=L4wSRlR3>ZZ>^au&hGiZ8)l3YS@${8JQP?)ayYa~eF|vff(dor!i9 zJX_jjYUkjufahA8`sbmY4=+&uO6*neLU@s->31>OCGb-1+gsR|J9$^A-j$AjmHey8 zTkYuApk1r@8b`kl{d(y)VBhG(Z?bm%%8IWeezOz51?^VFZ^K^i#5YL4-SO`r=T5j0 zZi06y|88rWeYgkxUU;A4_hUccJ{HT1dmQZv_@wf;IPs^@ zpO*fNqi>b}tm8jN-t+JU#kXN^hcCjH;LFN+6?+HVDeV=XufFbsw|J&yk+IbXrOitlsc`_aFa{tfm4_^q@bun)rT;P;mGb>6%d_oLcR#16$`-Wtt1 ze}TWk-;{sY+9v+H{6C2O>F9q+|J(XH`leRpN-!>#S6RWzRnV(Cdc5>%=+$8Ytf5#< z>_p4@dInyL&bxAL<!w3*;A)7jg6%(r2R2a`YnUvmJj9Ide7lo3V?Pvktoi&X;yFcB$-@*b878)cVal z7s17F30w-7!BgOJcq%*%p02UZu(qjtCi+?MY^rwX|9>yS!c<^6J8R%Byc}GhPGqhER9c z=#60$*c3KX|K`>QyaQsa9S!%t=4_h``DXn+R_Qo*d90&Wt zelQ#6!2aq#z}lwYK>34+4R-V)Xt|0HwRWS;Xv3Yn5sHsQ9|cFlF;3oCv~kL3&Sw1a z=+WmfLGg)bli*}alRriAsWHEi?pBo($(!!;Ro8j1a+1$i^>Q%}Js%dpLKuNF;7m9R z7Fn8cXRF>E^tr09Ggf6DezEMI)JmNE`SMRz^PX2Jc?+C)8QMa)NO_B~m%ycP89c?( z%xAgsPj&p$NXUIR3erGxQ+0xHJU*YKIqMfJs`Pdi4;*Hy)ty26#?2BUYDi=$? 
z#PKhce;Ijt7G};@z$@WZ@M^dkUIVX%Yv6V8dUyl85#9vX!gcUwcniE0-ezg$wqEny zfPOo?1KtTY!cFilcsINU-V5)8_rnL^gYY5vFnk0)3ZwV!G5p8j6Yxp61wI9zhR?vQ z@LBjAd>+04x54f3MfehY8Sa2P;VbY}_!@j2z5(BaZ^5_WJMdlj9(*5u06&Dg;79Od z_zCzmgaoy*ZKGw{ToL=ApKj%KPdk@$Nygb4{ASJt?^u+ z&ud;yQ}>W!KRf;}@_%*w-{c=A_jmXQ{1g5K|F-n%R`sb>a{M^?l^wr|d?lIOcvuZq zhY7F-OoTOIEm+&q)T)D4SM}>**LUI#q&IZ@M&vYxO<+^l3^rH47T7H<&3LV(w|4wC z)^4o3SG6tq?VNaf=^fN&sdZEvMNTK!Sy~tDu1>#hXx$a>f!z}(!DN^M1270vVF;$d zbeLgj)|IJovK&85PA}=bvHQT|U|-nJ($vXD%YpsXe*pGCI0z1gLtri(sy@T4-9%@h z>Tt&%;qw`7q@_2e8F#eu#$b=NH2OH{U*!0U z>9Yhbh0Bz83ifhKv#wL6pN4)qJOiEy&w^)Lnttb~-wMY+mz?w9`S1d`Qu(W}FSImu zE|Pw+<6k2GQpdl{+D$e|zryFMrZZmkN~=}93SJFY!)xHRK3}}Gle!Zm` z{|4zdqTdA9N_!4_o$Q;jZ-KYM+u(Y*0p1SpfOo=;P-`(`-v#fsG}=9A_rm+&{qOeW@kzJ^J_VnK&%mwlSxc{u*Wc889=@P@+pxDg z3rA6{#o^Zv9{T-U*-Qs?69N%F8vSmKjB~SZ_9X|ZG0vCI9M50fmNY8cw@#_gVkYz zrHR!*ON2FHE%mF7T?f|H`O;mAuV-nVhvzpS-VipjH1!)R-UMBT(`e0LbJ#-lv`6tR zVJp}gwt;@w7Pf=!VF%a|c7mN@7uXecvov$+&RlwE9zBUC#o}guDT?V##|Lb#=@*or zs&*3nL$cGb(_se8gjp~Qds&)#z0vxp&T-g%op?XAY{hf1`@;cnARJ_A>JCO5qJFuK zK2-iNV#DDGI1-M6qv06!nSec3_Bfxfnd-;utQn5CG~=FN^E}mYV_5z*I-{G z`&!L;pY=UP?fB?AZXowYOH=PA^;?U+4&Lm-x<@J>kh_m ziuv(($-kT2d*HqBJ}2*fv?V{0Sa{Kf_-vP5och|2Oo*@OLNv z2il*?`^(Y)wswnj?MWqTSBry{V|mr8pjCzO%GV}(V|%Syb(kQ#26iH>DXkrLEm#}Y zfpuX$<<`e;02{(aurX|6X~u7g)(kdRofg2zR zyHKq+ejj+8rI|-xw0_FV#?FELoxA~x4@4gX2g4ywUasbIh3~=l;RoiW{|3&_gI6aEGN)_()_#qwI}8C8$7e)Y<-b+)Tl!LJJAVKt}% z-khr^SQ@*A{6w|W)oK#21#3&2q*g~Qq*fQbp3}cRS_9Y+HiC`SuL*WjOEYdW>CMqw zz?M$D6Fv=wIC@9vozOeOF0iYU*G+ld(R;w2FbO8Z6c~U(m}=RI zc~wudzFB9w>SvIb>F8N#Va0nndT;rCh#d#}!hSFt=D_}N02~Mh!NHbhK0{PL7k#Ls z50gF|eFPi{N5Ro>3><4|>W))?oh@UJhbK7k31|~7Tg_JfWV9)8s->whP4N>Qe>yoQ z!919+yaMb(OVd9heFpkWN1r9V2wih9xpUxLI1d)X5;z~8Y-#$IqAgI}GDlx1f05%a zmcNADrEnQM1uloD!qY5UJ%)bO|0;5>mcAPM8h9;S1FwVE`+TjdD*py}BfJT&h3nwW@D_M0 zybZ308!WwbnfiAy-koqG+yw6uLz>6kK3|Rd#e2~2_4yLMlz*Sv`)Z1ry?OvX2p@tE z!$;twa5IcP*T=1IYCWO4PZHk(pMp=rXW&-&tflGyocceH{sP;w2A+yy_fG&vumeWGzc#oi4+ 
zgP&WP{4bQZ2VKv~XkR(;y^8PCbJSg~zF(|K&e!l8cmRH@yo1=^!SCS@@JIL)JOqD+ zzrbHD&76KiI}Cr&tF{-4Hf{jbRhm6gGp+VGGz2wz4#H zY_0lj9N#a$EqU!6y}k4f=pA7vY2DR2Yi)W430*8r-LCSx#e6Sra(YPbY5N;LNq%z7 z_xhUt0qH^PR2YJ3Fdb&VOqd13mZn}WwBE1}JWk{E#qI~Qr47Z-f&HZo@cG&d#vcd= zNtfp72}59REYIk}$Q=$xSen>K^&5pg+R?{QYiz7mLUeD&+dQ+b6X-huPE@@~*puND zI2BHVC&KCQB$#Ju>gTIo0eYdMN6=;{KGV@>p%+P??dWsl&m}g`(TmYaEZeMB-pOj) z&`aS0Sfi9Js=XpR4$J zj(@)V3mku?{8f&Bq5O-ipK!6GUt;YxUlF@Zd6#2f0k4Et!K9hQFG#e|J9+Z)r2dl$T0@q4iE zh4;bx;REnN_z-*;J^~+wn=Q>akD)ydpU`+uVsC*@!KW=vzh{)U)$yMt=Q;Si;xEMF zem&!a?ZjS$FTs~%c?mnvb}IiB>{sDy@OAhGd=tI}-?lXKd`ETOb^Q0_zfax=j{YIq zF2z4`^pDX$fuF+N@H6#ApL9nZ{Pv=Ej$Rn zgWp@4_5Gm!KRW(T@(+>sGyDbq3V(x#;qUMd_$T}e{%u*q2P?riSQ%DM+DKG$oFcpSi8cc^7FcW6MFzf|;!#?mh*cbML*)Rw8 zhXde1OY>|8>A4R^9|CjXP&f<@ha=!fI0}x2W8hdg4(gnlF~`FbEIqAlbPp$LK9h(~ zhErmBHKr;)4gEwo9i9a9U_LB>g)jnVsMa~yGvO?0Mb|_`xHylf4THi9se}>r#t=`^f?or1<$r@8{L}~=;x~c zdDb@d&qu#N`bz9o@IrVIyck{rFNK#`ntGS3&J~V-B{^3~zZ!cryarxtY5K2G-gW5L zJNgZ1H!6M;_F5;tPG{;N^jqMq@HV&}Zm=}t-Hvt#yi@fzVsC-pLF~!zl8m=>^*8boc=rIze4;~M}JNF>*#O5H{o0G zZTJp+7rqDIw>0zo0PRE7-R0;X$^Y2#Kau~b5XVO2%{=%}|DEu!i%{X77?N$Ch zN8d01YhvHP1MpjT5Pk>0hd;m{;ZN`o{2BfNe}%un!eoa^l)`oRpU04s+hYc*f`FVBBJR8Br zkb9BXRO2^8YYtn$mar9U4ckCJYzy1L_OJugT{Ls(1Uthnuq*5ayTcx^r{)+`OY-^J zA15Zm6d17d#_-}MCpD%errA1X4(aNjp{8e_n5lN9T9(>kwXoU=YP}e@H|zsd+tlc5 zY3zQ=&vyJA`TZS#0DT6+L5dH?9s+aWP&mxe)E}<=5$GcwU3V&RwBwHBKSw8RGK&%WdbYhEQxn|x=$XTj>%dBnmQ_z>gQ{idKJ01HB zcqTjxo(<1|E8w~CJWDgq`DhoY?n-Q)Q{shA-bKVNR^BDpmpbvwq+gDHg=Gib6EntD z@M^ePx%XpV1Fw~~2KzdAy``yfgW@+j{!Q}NlDE#$ZG+l}@a{9N_Fz}^GD zgkQnEa39*>R3v8M}&D)zRaztBKXI6JQOP2y4Px@OZJdS{=u)i(OBw zkKMqDH*S?k(Um={0!_&F-vWrS{SVt z^zwRR_kqWWeX;w&Z0P0ZsP)&lW5of`n}_Fn@j+^Xu|0nXcCI)Sdl(!Jy}S|FBgIkJ zqv06nPWGUY<<9itym^s9*)vLu3i1XQlq@VR zD_xp1IeR#!2@fAvvaoEHpQPeQ+2WGYIa5MO!C)XM5K2u7q-XYwdBL=_J^o5LBeVUr zM~o~f^*YXrc;)>aa>kGICyy9kTvQe*^p7jcD~l}f=S~TzLP}avAT1>+rDwU5nna<2 zH;n{k8oI{T=)({K@{6{@h7v zLCrLj6bNXjs2!cBf3uReXqwBqyb(C51AR0vX=kMD6k+kDA{!UwTGTKpPPVrFgC95pJlW6=i6C 
zDM@L1j+vpJfZ>QU~bnhlHg7@Xrz9?5ea zR#ZH*uw1v9;TDbbz-2Kti+4~~@1%PZDsQ!{lgQZ>8OP?DZdxhFN4 z;!M%MO;2ct&O(;*0-9P9B<)!sGuZP8H`VKLNXOx@zeC&0{mH|}fV02Ho~N67lMIcQl{;I7c|5D<0zO8I;26J*XR(qp$dE0 zS?P&-mr*Cc4pQzPy(n*1Q_LV4SxKp>N#z}^XBJWMq_R?z^#4G*F1xphQP+D&?yh+Q z*sThtWNI1Os(_tTxu0eZowL)XPbIIcw@-l-Zx}O5xtmteI_%>N2K6G*t3WrwRIqL^ z_1L==)LBuwntdeYepW>jGrJYkZPu|#V_JB}-Ysw4ncfSMrAJ+N8975o>w*r=o1dpw zj<=RTlHR33?^cJriN##AVeKpVYqyWrn<{&}-iX-?7nGIe&GlY3nxO8Z_Bq2lGwN9G z1-+Mtj&kj`tJN=OtUq~BPHEn&GJ=|6 zdb(aECO_(#eTcpavh}JToIN++Tv+weOU>K8fZje@mE*Y=CTHAWEoz+JIeGI-N+Y_} z0q;WVr4z`~7Gm4aL-YmLw#c8{R$DhIpszt7l%zj1lpAx77~uE`TK(~PMT@=No)SpQ z(5zCD^g{CvLCmv9Ai9>}+UlXh7cPm+%P(12I@6qN?{%BzeRK4}3Iuuvj`Y3TK=0_= zZE_$(Pb8#+9n>q-ae_ze+aT}p3@XhlF3|FIgS-RkeYNzF3zV3-bm_ZJhQRR29}Sc*?C_aZMb(iva(Dc z$9MX4-tSNDe9(KsS!qe>8pB(bPKDzgvFl^?;T}A;MBgp@O4mnbCS$H(DV5zt-A+h(7n@ z78jK*vp*a))h4!Zz7`ysQC<_bg2|a7j$V3~xM}694iy%rg!5(u^+;v}%U6ArT0vp2 zCif_3$S_^OAtl8#{lmOJa;LS?+a)EOnW;ZgBPqeKK26?ST`{$y#h1^P%ANM=oE=cX z7N&1jK}6FG=rK_&CR8xP@?u9noYpFq7S7kB4(nT^KOz)7Qp~7mhTgLOrCv~vCmb** zyL=4&jibU-T|U3)Hs+4>ezF!9nuG04GAkTPNlyW&1wy)>&X7gNKj!DHW-qu+7f z4rYZjv(iH0^n&!%Xswu_S`|*ydC<|1p)O2$vAj?~H_zKkW@jo|HUEFASP;s{*TRpg zXw;1VWsZ5;Jk2pH?}&=1n&N-0I;X$)T>8(lUzhTwt3X~LFI;fsM5Ai@vCCF;d?Ml0 z)Pk`7#-t~mAD!nB;<4);{iMB<6VXdH-Mf1k;neh4JxBb{dS=;O;#cYR#VV%HsGy=1 z|79biKPYm&zbECEM(kf*rnL;F>t!2A$Wk{3p9&Gwiv%`y7p zvyp%y1->8S++pg#V9qMqd&CDAd|?4C+jW^|ZY&s=0q&HAnpa zuh-1f3#COu;mnlOwCK!Y!ZAzz<89bVrPB2+E(`~CH=?ipnD!qp!{}oel&iBcsHm8; z(md#0Uu||UB{dk1*ySD}9eak9EdN2(TD1bP%q!i zC`(@#rygQz=zpl^Ep$d8-+Z5<^UF74F!*0TA1auizMpy#=7%Eg{G#H~mn?etqQByD zykAZBr$clHQ^OhF2QQSCWT>KyAn3=9~6DpixKJ?K!#>9WG82y{kG@p)KFsxs4dc69Ldd$(aw8oniZ#o(=x)LeBN_MN)@m7A5X(OC3FnT@P5+=Q@Kz_s-d)F-O;0e)H@YZ z^9yw8GxcGNJ>ax(Os#m1(Z7H=ze~&K7)lRj=?7Ja*9dcrsmG{z^i%fUbkV+9`5EE# zqZS>?%FmRTf2=hhU9Xi>&->d z>C)*lO7yS&Wz*;7%`PdOz9>?Ag2nm;}IU$PeD&6__rk~D8#Vg9hf$by2>qWNVS zX?ISf_&?Hzs!DWT@wxgHHFII!%*cXDRn&E8aY&ki EKQa5Jod5s; literal 0 HcmV?d00001 From 7987c01f3643e0af0a85e050dfd906277f4ba4ff Mon Sep 17 
00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 8 Oct 2024 04:37:03 +1100 Subject: [PATCH 15/85] Mute org.elasticsearch.logsdb.datageneration.DataGeneratorTests testDataGeneratorProducesValidMappingAndDocument #114188 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 4e1d6bbf4461..0a9736c47479 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -368,6 +368,9 @@ tests: - class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114232 +- class: org.elasticsearch.logsdb.datageneration.DataGeneratorTests + method: testDataGeneratorProducesValidMappingAndDocument + issue: https://github.com/elastic/elasticsearch/issues/114188 # Examples: # From 8f24f43aeda2fb7a1b65ca9711d0615e3a7544a8 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Mon, 7 Oct 2024 11:38:48 -0600 Subject: [PATCH 16/85] Fix issue with releasing resources in bulk tests (#114186) A recent commit incidentally changed a release resources call from doBefore to doAfter. Several tests depending on resources being released synchronously which requires doBefore. 
Closes #114181 Closes #114182 --- muted-tests.yml | 3 --- .../org/elasticsearch/action/bulk/IncrementalBulkService.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 0a9736c47479..f205c9ce44a0 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -345,9 +345,6 @@ tests: - class: org.elasticsearch.xpack.inference.InferenceCrudIT method: testGet issue: https://github.com/elastic/elasticsearch/issues/114135 -- class: org.elasticsearch.action.bulk.IncrementalBulkIT - method: testIncrementalBulkHighWatermarkBackOff - issue: https://github.com/elastic/elasticsearch/issues/114073 - class: org.elasticsearch.xpack.esql.expression.function.aggregate.AvgTests method: "testFold {TestCase= #7}" issue: https://github.com/elastic/elasticsearch/issues/114175 diff --git a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java index d5ad3aa2d29a..58ffe25e08e4 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/IncrementalBulkService.java @@ -194,7 +194,7 @@ public class IncrementalBulkService { releasables.clear(); // We do not need to set this back to false as this will be the last request. bulkInProgress = true; - client.bulk(bulkRequest, ActionListener.runAfter(new ActionListener<>() { + client.bulk(bulkRequest, ActionListener.runBefore(new ActionListener<>() { private final boolean isFirstRequest = incrementalRequestSubmitted == false; From 49902761479e85da18d0521c3f9a4fba428b51ad Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Mon, 7 Oct 2024 21:15:04 +0300 Subject: [PATCH 17/85] Fast refresh indices should use search shards (#113478) Fast refresh indices should now behave like non fast refresh indices in how they execute (m)gets and searches. I.e., they should use the search shards. 
For BWC, we define a new transport version. We expect search shards to be upgraded first, before promotable shards. Until the cluster is fully upgraded, the promotable shards (whether upgraded or not) will still receive and execute gets/searches locally. Relates ES-9573 Relates ES-9579 --- .../org/elasticsearch/TransportVersions.java | 1 + .../refresh/TransportShardRefreshAction.java | 32 +++++++------------ ...ansportUnpromotableShardRefreshAction.java | 15 +++++++++ .../action/get/TransportGetAction.java | 3 +- .../get/TransportShardMultiGetAction.java | 3 +- .../support/replication/PostWriteRefresh.java | 9 ++---- .../cluster/routing/OperationRouting.java | 9 +++++- .../index/cache/bitset/BitsetFilterCache.java | 2 +- .../routing/IndexRoutingTableTests.java | 24 ++++++++------ .../cache/bitset/BitSetFilterCacheTests.java | 2 +- 10 files changed, 56 insertions(+), 44 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index f6e4649aa480..1911013cbe8e 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -235,6 +235,7 @@ public class TransportVersions { public static final TransportVersion SEARCH_FAILURE_STATS = def(8_759_00_0); public static final TransportVersion INGEST_GEO_DATABASE_PROVIDERS = def(8_760_00_0); public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0); + public static final TransportVersion FAST_REFRESH_RCO = def(8_762_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index 7857e9a22e9b..cb667400240f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.injection.guice.Inject; @@ -120,27 +119,18 @@ public class TransportShardRefreshAction extends TransportReplicationAction< ActionListener listener ) { assert replicaRequest.primaryRefreshResult.refreshed() : "primary has not refreshed"; - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get( - clusterService.state().metadata().index(indexShardRoutingTable.shardId().getIndex()).getSettings() + UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( + indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), + replicaRequest.primaryRefreshResult.generation(), + false + ); + transportService.sendRequest( + transportService.getLocalNode(), + TransportUnpromotableShardRefreshAction.NAME, + unpromotableReplicaRequest, + new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) ); - - // Indices marked with fast refresh do not rely on refreshing the unpromotables - if (fastRefresh) { - listener.onResponse(null); - } else { - UnpromotableShardRefreshRequest 
unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( - indexShardRoutingTable, - replicaRequest.primaryRefreshResult.primaryTerm(), - replicaRequest.primaryRefreshResult.generation(), - false - ); - transportService.sendRequest( - transportService.getLocalNode(), - TransportUnpromotableShardRefreshAction.NAME, - unpromotableReplicaRequest, - new ActionListenerResponseHandler<>(listener.safeMap(r -> null), in -> ActionResponse.Empty.INSTANCE, refreshExecutor) - ); - } } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index 6c24ec2d1760..f91a983d4788 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -24,6 +24,9 @@ import org.elasticsearch.transport.TransportService; import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; +import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; + public class TransportUnpromotableShardRefreshAction extends TransportBroadcastUnpromotableAction< UnpromotableShardRefreshRequest, ActionResponse.Empty> { @@ -73,6 +76,18 @@ public class TransportUnpromotableShardRefreshAction extends TransportBroadcastU return; } + // During an upgrade to FAST_REFRESH_RCO, we expect search shards to be first upgraded before the primary is upgraded. Thus, + // when the primary is upgraded, and starts to deliver unpromotable refreshes, we expect the search shards to be upgraded already. + // Note that the fast refresh setting is final. 
+ // TODO: remove assertion (ES-9563) + assert INDEX_FAST_REFRESH_SETTING.get(shard.indexSettings().getSettings()) == false + || transportService.getLocalNodeConnection().getTransportVersion().onOrAfter(FAST_REFRESH_RCO) + : "attempted to refresh a fast refresh search shard " + + shard + + " on transport version " + + transportService.getLocalNodeConnection().getTransportVersion() + + " (before FAST_REFRESH_RCO)"; + ActionListener.run(responseListener, listener -> { shard.waitForPrimaryTermAndGeneration( request.getPrimaryTerm(), diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 189aa1c95d86..99eac250641a 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -125,11 +125,10 @@ public class TransportGetAction extends TransportSingleShardAction waitUntil(indexShard, location, new ActionListener<>() { @Override public void onResponse(Boolean forced) { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if (location != null && (indexShard.routingEntry().isSearchable() == false && fastRefresh == false)) { + if (location != null && indexShard.routingEntry().isSearchable() == false) { refreshUnpromotables(indexShard, location, listener, forced, postWriteRefreshTimeout); } else { listener.onResponse(forced); @@ -68,9 +65,7 @@ public class PostWriteRefresh { } }); case IMMEDIATE -> immediate(indexShard, listener.delegateFailureAndWrap((l, r) -> { - // Fast refresh indices do not depend on the unpromotables being refreshed - boolean fastRefresh = IndexSettings.INDEX_FAST_REFRESH_SETTING.get(indexShard.indexSettings().getSettings()); - if 
(indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0 && fastRefresh == false) { + if (indexShard.getReplicationGroup().getRoutingTable().unpromotableShards().size() > 0) { sendUnpromotableRequests(indexShard, r.generation(), true, l, postWriteRefreshTimeout); } else { l.onResponse(true); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index f7812d284f2a..9120e25b443d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -32,6 +32,7 @@ import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; import static org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; public class OperationRouting { @@ -305,8 +306,14 @@ public class OperationRouting { } public static boolean canSearchShard(ShardRouting shardRouting, ClusterState clusterState) { + // TODO: remove if and always return isSearchable (ES-9563) if (INDEX_FAST_REFRESH_SETTING.get(clusterState.metadata().index(shardRouting.index()).getSettings())) { - return shardRouting.isPromotableToPrimary(); + // Until all the cluster is upgraded, we send searches/gets to the primary (even if it has been upgraded) to execute locally. 
+ if (clusterState.getMinTransportVersion().onOrAfter(FAST_REFRESH_RCO)) { + return shardRouting.isSearchable(); + } else { + return shardRouting.isPromotableToPrimary(); + } } else { return shardRouting.isSearchable(); } diff --git a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index c19e3ca35356..3b37afc3b297 100644 --- a/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/server/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -105,7 +105,7 @@ public final class BitsetFilterCache boolean loadFiltersEagerlySetting = settings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); boolean isStateless = DiscoveryNode.isStateless(settings.getNodeSettings()); if (isStateless) { - return DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.INDEX_ROLE) + return DiscoveryNode.hasRole(settings.getNodeSettings(), DiscoveryNodeRole.SEARCH_ROLE) && loadFiltersEagerlySetting && INDEX_FAST_REFRESH_SETTING.get(settings.getSettings()); } else { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java index 21b30557cafe..6a7f4bb27a32 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/IndexRoutingTableTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.TransportVersion; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; @@ -19,6 +20,7 @@ import org.mockito.Mockito; import java.util.List; +import static org.elasticsearch.TransportVersions.FAST_REFRESH_RCO; import static 
org.elasticsearch.index.IndexSettings.INDEX_FAST_REFRESH_SETTING; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; @@ -27,16 +29,22 @@ import static org.mockito.Mockito.when; public class IndexRoutingTableTests extends ESTestCase { public void testReadyForSearch() { - innerReadyForSearch(false); - innerReadyForSearch(true); + innerReadyForSearch(false, false); + innerReadyForSearch(false, true); + innerReadyForSearch(true, false); + innerReadyForSearch(true, true); } - private void innerReadyForSearch(boolean fastRefresh) { + // TODO: remove if (fastRefresh && beforeFastRefreshRCO) branches (ES-9563) + private void innerReadyForSearch(boolean fastRefresh, boolean beforeFastRefreshRCO) { Index index = new Index(randomIdentifier(), UUIDs.randomBase64UUID()); ClusterState clusterState = mock(ClusterState.class, Mockito.RETURNS_DEEP_STUBS); when(clusterState.metadata().index(any(Index.class)).getSettings()).thenReturn( Settings.builder().put(INDEX_FAST_REFRESH_SETTING.getKey(), fastRefresh).build() ); + when(clusterState.getMinTransportVersion()).thenReturn( + beforeFastRefreshRCO ? 
TransportVersion.fromId(FAST_REFRESH_RCO.id() - 1_00_0) : TransportVersion.current() + ); // 2 primaries that are search and index ShardId p1 = new ShardId(index, 0); IndexShardRoutingTable shardTable1 = new IndexShardRoutingTable( @@ -55,7 +63,7 @@ public class IndexRoutingTableTests extends ESTestCase { shardTable1 = new IndexShardRoutingTable(p1, List.of(getShard(p1, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); shardTable2 = new IndexShardRoutingTable(p2, List.of(getShard(p2, true, ShardRoutingState.STARTED, ShardRouting.Role.INDEX_ONLY))); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -91,7 +99,7 @@ public class IndexRoutingTableTests extends ESTestCase { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { + if (fastRefresh && beforeFastRefreshRCO) { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } else { assertFalse(indexRoutingTable.readyForSearch(clusterState)); @@ -118,8 +126,6 @@ public class IndexRoutingTableTests extends ESTestCase { assertTrue(indexRoutingTable.readyForSearch(clusterState)); // 2 unassigned primaries that are index only with some replicas that are all available - // Fast refresh indices do not support replicas so this can not practically happen. If we add support we will want to ensure - // that readyForSearch allows for searching replicas when the index shard is not available. 
shardTable1 = new IndexShardRoutingTable( p1, List.of( @@ -137,8 +143,8 @@ public class IndexRoutingTableTests extends ESTestCase { ) ); indexRoutingTable = new IndexRoutingTable(index, new IndexShardRoutingTable[] { shardTable1, shardTable2 }); - if (fastRefresh) { - assertFalse(indexRoutingTable.readyForSearch(clusterState)); // if we support replicas for fast refreshes this needs to change + if (fastRefresh && beforeFastRefreshRCO) { + assertFalse(indexRoutingTable.readyForSearch(clusterState)); } else { assertTrue(indexRoutingTable.readyForSearch(clusterState)); } diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 77635fd0312f..4cb3ce418f76 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -276,7 +276,7 @@ public class BitSetFilterCacheTests extends ESTestCase { for (var isStateless : values) { if (isStateless) { assertEquals( - loadFiltersEagerly && indexFastRefresh && hasIndexRole, + loadFiltersEagerly && indexFastRefresh && hasIndexRole == false, BitsetFilterCache.shouldLoadRandomAccessFiltersEagerly( bitsetFilterCacheSettings(isStateless, hasIndexRole, loadFiltersEagerly, indexFastRefresh) ) From ce73a908cce972a62d4e5c4f3abb9aaf283266b9 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 7 Oct 2024 20:06:13 +0100 Subject: [PATCH 18/85] [ML] Use the same chunking configurations for models in the Elasticsearch service (#111336) --- docs/changelog/111336.yaml | 5 + .../chunking/EmbeddingRequestChunker.java | 54 ++- .../BaseElasticsearchInternalService.java | 5 +- .../ElasticsearchInternalModel.java | 5 + .../ElasticsearchInternalService.java | 135 +++--- .../EmbeddingRequestChunkerTests.java | 80 ++++ .../ElasticsearchInternalServiceTests.java | 390 ++++++++++-------- 7 files 
changed, 449 insertions(+), 225 deletions(-) create mode 100644 docs/changelog/111336.yaml diff --git a/docs/changelog/111336.yaml b/docs/changelog/111336.yaml new file mode 100644 index 000000000000..d5bf602cb7a8 --- /dev/null +++ b/docs/changelog/111336.yaml @@ -0,0 +1,5 @@ +pr: 111336 +summary: Use the same chunking configurations for models in the Elasticsearch service +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java index 81ebebdb47e4..3ae8dc055039 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunker.java @@ -16,10 +16,13 @@ import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextExpansionResults; import java.util.ArrayList; import java.util.List; @@ -42,7 +45,8 @@ public class EmbeddingRequestChunker { public enum EmbeddingType { FLOAT, - BYTE; + BYTE, + 
SPARSE; public static EmbeddingType fromDenseVectorElementType(DenseVectorFieldMapper.ElementType elementType) { return switch (elementType) { @@ -67,6 +71,7 @@ public class EmbeddingRequestChunker { private List> chunkedInputs; private List>> floatResults; private List>> byteResults; + private List>> sparseResults; private AtomicArray errors; private ActionListener> finalListener; @@ -117,6 +122,7 @@ public class EmbeddingRequestChunker { switch (embeddingType) { case FLOAT -> floatResults = new ArrayList<>(inputs.size()); case BYTE -> byteResults = new ArrayList<>(inputs.size()); + case SPARSE -> sparseResults = new ArrayList<>(inputs.size()); } errors = new AtomicArray<>(inputs.size()); @@ -127,6 +133,7 @@ public class EmbeddingRequestChunker { switch (embeddingType) { case FLOAT -> floatResults.add(new AtomicArray<>(numberOfSubBatches)); case BYTE -> byteResults.add(new AtomicArray<>(numberOfSubBatches)); + case SPARSE -> sparseResults.add(new AtomicArray<>(numberOfSubBatches)); } chunkedInputs.add(chunks); } @@ -217,6 +224,7 @@ public class EmbeddingRequestChunker { switch (embeddingType) { case FLOAT -> handleFloatResults(inferenceServiceResults); case BYTE -> handleByteResults(inferenceServiceResults); + case SPARSE -> handleSparseResults(inferenceServiceResults); } } @@ -266,6 +274,29 @@ public class EmbeddingRequestChunker { } } + private void handleSparseResults(InferenceServiceResults inferenceServiceResults) { + if (inferenceServiceResults instanceof SparseEmbeddingResults sparseEmbeddings) { + if (failIfNumRequestsDoNotMatch(sparseEmbeddings.embeddings().size())) { + return; + } + + int start = 0; + for (var pos : positions) { + sparseResults.get(pos.inputIndex()) + .setOnce(pos.chunkIndex(), sparseEmbeddings.embeddings().subList(start, start + pos.embeddingCount())); + start += pos.embeddingCount(); + } + + if (resultCount.incrementAndGet() == totalNumberOfRequests) { + sendResponse(); + } + } else { + onFailure( + 
unexpectedResultTypeException(inferenceServiceResults.getWriteableName(), InferenceTextEmbeddingByteResults.NAME) + ); + } + } + private boolean failIfNumRequestsDoNotMatch(int numberOfResults) { int numberOfRequests = positions.stream().mapToInt(SubBatchPositionsAndCount::embeddingCount).sum(); if (numberOfRequests != numberOfResults) { @@ -319,6 +350,7 @@ public class EmbeddingRequestChunker { return switch (embeddingType) { case FLOAT -> mergeFloatResultsWithInputs(chunkedInputs.get(resultIndex), floatResults.get(resultIndex)); case BYTE -> mergeByteResultsWithInputs(chunkedInputs.get(resultIndex), byteResults.get(resultIndex)); + case SPARSE -> mergeSparseResultsWithInputs(chunkedInputs.get(resultIndex), sparseResults.get(resultIndex)); }; } @@ -366,6 +398,26 @@ public class EmbeddingRequestChunker { return new InferenceChunkedTextEmbeddingByteResults(embeddingChunks, false); } + private InferenceChunkedSparseEmbeddingResults mergeSparseResultsWithInputs( + List chunks, + AtomicArray> debatchedResults + ) { + var all = new ArrayList(); + for (int i = 0; i < debatchedResults.length(); i++) { + var subBatch = debatchedResults.get(i); + all.addAll(subBatch); + } + + assert chunks.size() == all.size(); + + var embeddingChunks = new ArrayList(); + for (int i = 0; i < chunks.size(); i++) { + embeddingChunks.add(new MlChunkedTextExpansionResults.ChunkedResult(chunks.get(i), all.get(i).tokens())); + } + + return new InferenceChunkedSparseEmbeddingResults(embeddingChunks); + } + public record BatchRequest(List subBatches) { public int size() { return subBatches.stream().mapToInt(SubBatch::size).sum(); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java index 0dd41db2f016..881e2e82b766 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/BaseElasticsearchInternalService.java @@ -248,15 +248,14 @@ public abstract class BaseElasticsearchInternalService implements InferenceServi InferenceConfigUpdate update, List inputs, InputType inputType, - TimeValue timeout, - boolean chunk + TimeValue timeout ) { var request = InferModelAction.Request.forTextInput(id, update, inputs, true, timeout); request.setPrefixType( InputType.SEARCH == inputType ? TrainedModelPrefixStrings.PrefixType.SEARCH : TrainedModelPrefixStrings.PrefixType.INGEST ); request.setHighPriority(InputType.SEARCH == inputType); - request.setChunked(chunk); + request.setChunked(false); return request; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java index 07d0cc14b2ac..a593e1dfb6d9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalModel.java @@ -58,6 +58,11 @@ public abstract class ElasticsearchInternalModel extends Model { ActionListener listener ); + @Override + public ElasticsearchInternalServiceSettings getServiceSettings() { + return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); + } + @Override public String toString() { return Strings.toString(this.getConfigurations()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 9b4c0e50bdeb..739f514bee1c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -28,21 +28,19 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.UnparsedModel; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; -import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; -import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; -import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import org.elasticsearch.xpack.inference.services.ServiceUtils; @@ -74,6 +72,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 ); + public static final int EMBEDDING_MAX_BATCH_SIZE = 10; public static final String DEFAULT_ELSER_ID = ".elser-2"; private static final Logger logger = LogManager.getLogger(ElasticsearchInternalService.class); @@ -501,8 +500,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi TextEmbeddingConfigUpdate.EMPTY_INSTANCE, inputs, inputType, - timeout, - false + timeout ); ActionListener mlResultsListener = listener.delegateFailureAndWrap( @@ -528,8 +526,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi TextExpansionConfigUpdate.EMPTY_UPDATE, inputs, inputType, - timeout, - false + timeout ); ActionListener mlResultsListener = listener.delegateFailureAndWrap( @@ -557,8 +554,7 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi new TextSimilarityConfigUpdate(query), inputs, inputType, - timeout, - false + timeout ); var modelSettings = (CustomElandRerankTaskSettings) model.getTaskSettings(); @@ -610,52 +606,80 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi if (model instanceof ElasticsearchInternalModel esModel) { - var configUpdate = chunkingOptions != null - ? 
new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span()) - : new TokenizationConfigUpdate(null, null); - - var request = buildInferenceRequest( - model.getConfigurations().getInferenceEntityId(), - configUpdate, + var batchedRequests = new EmbeddingRequestChunker( input, - inputType, - timeout, - true - ); + EMBEDDING_MAX_BATCH_SIZE, + embeddingTypeFromTaskTypeAndSettings(model.getTaskType(), esModel.internalServiceSettings) + ).batchRequestsWithListeners(listener); - ActionListener mlResultsListener = listener.delegateFailureAndWrap( - (l, inferenceResult) -> l.onResponse(translateToChunkedResults(inferenceResult.getInferenceResults())) - ); + for (var batch : batchedRequests) { + var inferenceRequest = buildInferenceRequest( + model.getConfigurations().getInferenceEntityId(), + EmptyConfigUpdate.INSTANCE, + batch.batch().inputs(), + inputType, + timeout + ); - var maybeDeployListener = mlResultsListener.delegateResponse( - (l, exception) -> maybeStartDeployment(esModel, exception, request, mlResultsListener) - ); + ActionListener mlResultsListener = batch.listener() + .delegateFailureAndWrap( + (l, inferenceResult) -> translateToChunkedResult(model.getTaskType(), inferenceResult.getInferenceResults(), l) + ); - client.execute(InferModelAction.INSTANCE, request, maybeDeployListener); + var maybeDeployListener = mlResultsListener.delegateResponse( + (l, exception) -> maybeStartDeployment(esModel, exception, inferenceRequest, mlResultsListener) + ); + + client.execute(InferModelAction.INSTANCE, inferenceRequest, maybeDeployListener); + } } else { listener.onFailure(notElasticsearchModelException(model)); } } - private static List translateToChunkedResults(List inferenceResults) { - var translated = new ArrayList(); + private static void translateToChunkedResult( + TaskType taskType, + List inferenceResults, + ActionListener chunkPartListener + ) { + if (taskType == TaskType.TEXT_EMBEDDING) { + var translated = new ArrayList(); - for (var 
inferenceResult : inferenceResults) { - translated.add(translateToChunkedResult(inferenceResult)); - } + for (var inferenceResult : inferenceResults) { + if (inferenceResult instanceof MlTextEmbeddingResults mlTextEmbeddingResult) { + translated.add( + new InferenceTextEmbeddingFloatResults.InferenceFloatEmbedding(mlTextEmbeddingResult.getInferenceAsFloat()) + ); + } else if (inferenceResult instanceof ErrorInferenceResults error) { + chunkPartListener.onFailure(error.getException()); + return; + } else { + chunkPartListener.onFailure( + createInvalidChunkedResultException(MlTextEmbeddingResults.NAME, inferenceResult.getWriteableName()) + ); + return; + } + } + chunkPartListener.onResponse(new InferenceTextEmbeddingFloatResults(translated)); + } else { // sparse + var translated = new ArrayList(); - return translated; - } - - private static ChunkedInferenceServiceResults translateToChunkedResult(InferenceResults inferenceResult) { - if (inferenceResult instanceof MlChunkedTextEmbeddingFloatResults mlChunkedResult) { - return InferenceChunkedTextEmbeddingFloatResults.ofMlResults(mlChunkedResult); - } else if (inferenceResult instanceof MlChunkedTextExpansionResults mlChunkedResult) { - return InferenceChunkedSparseEmbeddingResults.ofMlResult(mlChunkedResult); - } else if (inferenceResult instanceof ErrorInferenceResults error) { - return new ErrorChunkedInferenceResults(error.getException()); - } else { - throw createInvalidChunkedResultException(MlChunkedTextEmbeddingFloatResults.NAME, inferenceResult.getWriteableName()); + for (var inferenceResult : inferenceResults) { + if (inferenceResult instanceof TextExpansionResults textExpansionResult) { + translated.add( + new SparseEmbeddingResults.Embedding(textExpansionResult.getWeightedTokens(), textExpansionResult.isTruncated()) + ); + } else if (inferenceResult instanceof ErrorInferenceResults error) { + chunkPartListener.onFailure(error.getException()); + return; + } else { + chunkPartListener.onFailure( + 
createInvalidChunkedResultException(TextExpansionResults.NAME, inferenceResult.getWriteableName()) + ); + return; + } + } + chunkPartListener.onResponse(new SparseEmbeddingResults(translated)); } } @@ -738,4 +762,21 @@ public class ElasticsearchInternalService extends BaseElasticsearchInternalServi protected boolean isDefaultId(String inferenceId) { return DEFAULT_ELSER_ID.equals(inferenceId); } + + static EmbeddingRequestChunker.EmbeddingType embeddingTypeFromTaskTypeAndSettings( + TaskType taskType, + ElasticsearchInternalServiceSettings serviceSettings + ) { + return switch (taskType) { + case SPARSE_EMBEDDING -> EmbeddingRequestChunker.EmbeddingType.SPARSE; + case TEXT_EMBEDDING -> serviceSettings.elementType() == null + ? EmbeddingRequestChunker.EmbeddingType.FLOAT + : EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(serviceSettings.elementType()); + default -> throw new ElasticsearchStatusException( + "Chunking is not supported for task type [{}]", + RestStatus.BAD_REQUEST, + taskType + ); + }; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index cf862ee6fb4b..c1be537a6b0a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -11,10 +11,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.InferenceChunkedSparseEmbeddingResults; import 
org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; +import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.ml.search.WeightedToken; import java.util.ArrayList; import java.util.List; @@ -357,6 +360,83 @@ public class EmbeddingRequestChunkerTests extends ESTestCase { } } + public void testMergingListener_Sparse() { + int batchSize = 4; + int chunkSize = 10; + int overlap = 0; + // passage will be chunked into 2.1 batches + // and spread over 3 batch requests + int numberOfWordsInPassage = (chunkSize * batchSize * 2) + 5; + + var passageBuilder = new StringBuilder(); + for (int i = 0; i < numberOfWordsInPassage; i++) { + passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace + } + List inputs = List.of("1st small", "2nd small", "3rd small", passageBuilder.toString()); + + var finalListener = testListener(); + var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap, EmbeddingRequestChunker.EmbeddingType.SPARSE) + .batchRequestsWithListeners(finalListener); + assertThat(batches, hasSize(3)); + + // 4 inputs in 3 batches + { + var embeddings = new ArrayList(); + for (int i = 0; i < batchSize; i++) { + embeddings.add(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken(randomAlphaOfLength(4), 1.0f)), false)); + } + batches.get(0).listener().onResponse(new SparseEmbeddingResults(embeddings)); + } + { + var embeddings = new ArrayList(); + for (int i = 0; i < batchSize; i++) { + embeddings.add(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken(randomAlphaOfLength(4), 1.0f)), false)); + } + batches.get(1).listener().onResponse(new 
SparseEmbeddingResults(embeddings)); + } + { + var embeddings = new ArrayList(); + for (int i = 0; i < 4; i++) { // 4 chunks in the final batch + embeddings.add(new SparseEmbeddingResults.Embedding(List.of(new WeightedToken(randomAlphaOfLength(4), 1.0f)), false)); + } + batches.get(2).listener().onResponse(new SparseEmbeddingResults(embeddings)); + } + + assertNotNull(finalListener.results); + assertThat(finalListener.results, hasSize(4)); + { + var chunkedResult = finalListener.results.get(0); + assertThat(chunkedResult, instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var chunkedSparseResult = (InferenceChunkedSparseEmbeddingResults) chunkedResult; + assertThat(chunkedSparseResult.getChunkedResults(), hasSize(1)); + assertEquals("1st small", chunkedSparseResult.getChunkedResults().get(0).matchedText()); + } + { + var chunkedResult = finalListener.results.get(1); + assertThat(chunkedResult, instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var chunkedSparseResult = (InferenceChunkedSparseEmbeddingResults) chunkedResult; + assertThat(chunkedSparseResult.getChunkedResults(), hasSize(1)); + assertEquals("2nd small", chunkedSparseResult.getChunkedResults().get(0).matchedText()); + } + { + var chunkedResult = finalListener.results.get(2); + assertThat(chunkedResult, instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var chunkedSparseResult = (InferenceChunkedSparseEmbeddingResults) chunkedResult; + assertThat(chunkedSparseResult.getChunkedResults(), hasSize(1)); + assertEquals("3rd small", chunkedSparseResult.getChunkedResults().get(0).matchedText()); + } + { + // this is the large input split in multiple chunks + var chunkedResult = finalListener.results.get(3); + assertThat(chunkedResult, instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var chunkedSparseResult = (InferenceChunkedSparseEmbeddingResults) chunkedResult; + assertThat(chunkedSparseResult.getChunkedResults(), hasSize(9)); // passage is split into 9 chunks, 10 
words each + assertThat(chunkedSparseResult.getChunkedResults().get(0).matchedText(), startsWith("passage_input0 ")); + assertThat(chunkedSparseResult.getChunkedResults().get(1).matchedText(), startsWith(" passage_input10 ")); + assertThat(chunkedSparseResult.getChunkedResults().get(8).matchedText(), startsWith(" passage_input80 ")); + } + } + public void testListenerErrorsWithWrongNumberOfResponses() { List inputs = List.of("1st small", "2nd small", "3rd small"); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index cd6da4c0ad8d..db7189dc1af1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.action.util.QueryPage; @@ -44,15 +43,14 @@ import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; -import org.elasticsearch.xpack.core.ml.inference.results.InferenceChunkedTextExpansionResultsTests; -import 
org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResults; -import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextEmbeddingFloatResultsTests; -import org.elasticsearch.xpack.core.ml.inference.results.MlChunkedTextExpansionResults; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResultsTests; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResultsTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; -import org.elasticsearch.xpack.core.utils.FloatConversionUtils; import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.junit.After; import org.junit.Before; @@ -663,14 +661,187 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void testChunkInfer_e5() { var mlTrainedModelResults = new ArrayList(); - mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults()); - mlTrainedModelResults.add(MlChunkedTextEmbeddingFloatResultsTests.createRandomResults()); + mlTrainedModelResults.add(MlTextEmbeddingResultsTests.createRandomResults()); + mlTrainedModelResults.add(MlTextEmbeddingResultsTests.createRandomResults()); + var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); + + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocationOnMock -> { + var listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(response); + return null; + 
}).when(client).execute(same(InferModelAction.INSTANCE), any(InferModelAction.Request.class), any(ActionListener.class)); + + var model = new MultilingualE5SmallModel( + "foo", + TaskType.TEXT_EMBEDDING, + "e5", + new MultilingualE5SmallInternalServiceSettings(1, 1, "cross-platform", null) + ); + var service = createService(client); + + var gotResults = new AtomicBoolean(); + var resultsListener = ActionListener.>wrap(chunkedResponse -> { + assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var result1 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(0); + assertThat(result1.chunks(), hasSize(1)); + assertArrayEquals( + ((MlTextEmbeddingResults) mlTrainedModelResults.get(0)).getInferenceAsFloat(), + result1.getChunks().get(0).embedding(), + 0.0001f + ); + assertEquals("foo", result1.getChunks().get(0).matchedText()); + assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); + var result2 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(1); + assertThat(result2.chunks(), hasSize(1)); + assertArrayEquals( + ((MlTextEmbeddingResults) mlTrainedModelResults.get(1)).getInferenceAsFloat(), + result2.getChunks().get(0).embedding(), + 0.0001f + ); + assertEquals("bar", result2.getChunks().get(0).matchedText()); + + gotResults.set(true); + }, ESTestCase::fail); + + service.chunkedInfer( + model, + null, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(null, null), + InferenceAction.Request.DEFAULT_TIMEOUT, + ActionListener.runAfter(resultsListener, () -> terminate(threadPool)) + ); + + assertTrue("Listener not called", gotResults.get()); + } + + @SuppressWarnings("unchecked") + public void testChunkInfer_Sparse() { + var mlTrainedModelResults = new ArrayList(); + mlTrainedModelResults.add(TextExpansionResultsTests.createRandomResults()); + 
mlTrainedModelResults.add(TextExpansionResultsTests.createRandomResults()); + var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); + + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocationOnMock -> { + var listener = (ActionListener) invocationOnMock.getArguments()[2]; + listener.onResponse(response); + return null; + }).when(client).execute(same(InferModelAction.INSTANCE), any(InferModelAction.Request.class), any(ActionListener.class)); + + var model = new CustomElandModel( + "foo", + TaskType.SPARSE_EMBEDDING, + "elasticsearch", + new ElasticsearchInternalServiceSettings(1, 1, "model-id", null) + ); + var service = createService(client); + + var gotResults = new AtomicBoolean(); + var resultsListener = ActionListener.>wrap(chunkedResponse -> { + assertThat(chunkedResponse, hasSize(2)); + assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var result1 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(0); + assertEquals( + ((TextExpansionResults) mlTrainedModelResults.get(0)).getWeightedTokens(), + result1.getChunkedResults().get(0).weightedTokens() + ); + assertEquals("foo", result1.getChunkedResults().get(0).matchedText()); + assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); + var result2 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(1); + assertEquals( + ((TextExpansionResults) mlTrainedModelResults.get(1)).getWeightedTokens(), + result2.getChunkedResults().get(0).weightedTokens() + ); + assertEquals("bar", result2.getChunkedResults().get(0).matchedText()); + gotResults.set(true); + }, ESTestCase::fail); + + service.chunkedInfer( + model, + null, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(null, null), + InferenceAction.Request.DEFAULT_TIMEOUT, + ActionListener.runAfter(resultsListener, () -> terminate(threadPool)) + ); + + 
assertTrue("Listener not called", gotResults.get()); + } + + @SuppressWarnings("unchecked") + public void testChunkInferSetsTokenization() { + var expectedSpan = new AtomicInteger(); + var expectedWindowSize = new AtomicReference(); + + Client client = mock(Client.class); + when(client.threadPool()).thenReturn(threadPool); + doAnswer(invocationOnMock -> { + var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; + assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); + var update = (TokenizationConfigUpdate) request.getUpdate(); + assertEquals(update.getSpanSettings().span(), expectedSpan.get()); + assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); + return null; + }).when(client) + .execute( + same(InferTrainedModelDeploymentAction.INSTANCE), + any(InferTrainedModelDeploymentAction.Request.class), + any(ActionListener.class) + ); + + var model = new MultilingualE5SmallModel( + "foo", + TaskType.TEXT_EMBEDDING, + "e5", + new MultilingualE5SmallInternalServiceSettings(1, 1, "cross-platform", null) + ); + var service = createService(client); + + expectedSpan.set(-1); + expectedWindowSize.set(null); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + null, + InferenceAction.Request.DEFAULT_TIMEOUT, + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + + expectedSpan.set(-1); + expectedWindowSize.set(256); + service.chunkedInfer( + model, + List.of("foo", "bar"), + Map.of(), + InputType.SEARCH, + new ChunkingOptions(256, null), + InferenceAction.Request.DEFAULT_TIMEOUT, + ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) + ); + + } + + @SuppressWarnings("unchecked") + public void testChunkInfer_FailsBatch() { + var mlTrainedModelResults = new ArrayList(); + mlTrainedModelResults.add(MlTextEmbeddingResultsTests.createRandomResults()); + 
mlTrainedModelResults.add(MlTextEmbeddingResultsTests.createRandomResults()); mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); - ThreadPool threadpool = new TestThreadPool("test"); Client client = mock(Client.class); - when(client.threadPool()).thenReturn(threadpool); + when(client.threadPool()).thenReturn(threadPool); doAnswer(invocationOnMock -> { var listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(response); @@ -688,182 +859,29 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { var gotResults = new AtomicBoolean(); var resultsListener = ActionListener.>wrap(chunkedResponse -> { assertThat(chunkedResponse, hasSize(3)); - assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); - var result1 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(0); - assertEquals( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().size(), - result1.getChunks().size() - ); - assertEquals( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).matchedText(), - result1.getChunks().get(0).matchedText() - ); - assertArrayEquals( - (FloatConversionUtils.floatArrayOf( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(0)).getChunks().get(0).embedding() - )), - result1.getChunks().get(0).embedding(), - 0.0001f - ); - assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedTextEmbeddingFloatResults.class)); - var result2 = (InferenceChunkedTextEmbeddingFloatResults) chunkedResponse.get(1); - // assertEquals(((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunks()); + // a single failure fails the batch + for (var er : chunkedResponse) { + assertThat(er, instanceOf(ErrorChunkedInferenceResults.class)); + assertEquals("boom", 
((ErrorChunkedInferenceResults) er).getException().getMessage()); + } - assertEquals( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().size(), - result2.getChunks().size() - ); - assertEquals( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).matchedText(), - result2.getChunks().get(0).matchedText() - ); - assertArrayEquals( - (FloatConversionUtils.floatArrayOf( - ((MlChunkedTextEmbeddingFloatResults) mlTrainedModelResults.get(1)).getChunks().get(0).embedding() - )), - result2.getChunks().get(0).embedding(), - 0.0001f - ); - - var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); - assertThat(result3.getException(), instanceOf(RuntimeException.class)); - assertThat(result3.getException().getMessage(), containsString("boom")); gotResults.set(true); }, ESTestCase::fail); service.chunkedInfer( model, null, - List.of("foo", "bar"), + List.of("foo", "bar", "baz"), Map.of(), InputType.SEARCH, new ChunkingOptions(null, null), InferenceAction.Request.DEFAULT_TIMEOUT, - ActionListener.runAfter(resultsListener, () -> terminate(threadpool)) + ActionListener.runAfter(resultsListener, () -> terminate(threadPool)) ); - if (gotResults.get() == false) { - terminate(threadpool); - } assertTrue("Listener not called", gotResults.get()); } - @SuppressWarnings("unchecked") - public void testChunkInfer_Sparse() { - var mlTrainedModelResults = new ArrayList(); - mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults()); - mlTrainedModelResults.add(InferenceChunkedTextExpansionResultsTests.createRandomResults()); - mlTrainedModelResults.add(new ErrorInferenceResults(new RuntimeException("boom"))); - var response = new InferModelAction.Response(mlTrainedModelResults, "foo", true); - - ThreadPool threadpool = new TestThreadPool("test"); - Client client = mock(Client.class); - when(client.threadPool()).thenReturn(threadpool); - doAnswer(invocationOnMock -> { - var listener 
= (ActionListener) invocationOnMock.getArguments()[2]; - listener.onResponse(response); - return null; - }).when(client).execute(same(InferModelAction.INSTANCE), any(InferModelAction.Request.class), any(ActionListener.class)); - - var model = new CustomElandModel( - "foo", - TaskType.SPARSE_EMBEDDING, - "elasticsearch", - new ElasticsearchInternalServiceSettings(1, 1, "model-id", null) - ); - var service = createService(client); - - var gotResults = new AtomicBoolean(); - var resultsListener = ActionListener.>wrap(chunkedResponse -> { - assertThat(chunkedResponse, hasSize(3)); - assertThat(chunkedResponse.get(0), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); - var result1 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(0); - assertEquals(((MlChunkedTextExpansionResults) mlTrainedModelResults.get(0)).getChunks(), result1.getChunkedResults()); - assertThat(chunkedResponse.get(1), instanceOf(InferenceChunkedSparseEmbeddingResults.class)); - var result2 = (InferenceChunkedSparseEmbeddingResults) chunkedResponse.get(1); - assertEquals(((MlChunkedTextExpansionResults) mlTrainedModelResults.get(1)).getChunks(), result2.getChunkedResults()); - var result3 = (ErrorChunkedInferenceResults) chunkedResponse.get(2); - assertThat(result3.getException(), instanceOf(RuntimeException.class)); - assertThat(result3.getException().getMessage(), containsString("boom")); - gotResults.set(true); - }, ESTestCase::fail); - - service.chunkedInfer( - model, - null, - List.of("foo", "bar"), - Map.of(), - InputType.SEARCH, - new ChunkingOptions(null, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - ActionListener.runAfter(resultsListener, () -> terminate(threadpool)) - ); - - if (gotResults.get() == false) { - terminate(threadpool); - } - assertTrue("Listener not called", gotResults.get()); - } - - @SuppressWarnings("unchecked") - public void testChunkInferSetsTokenization() { - var expectedSpan = new AtomicInteger(); - var expectedWindowSize = new 
AtomicReference(); - - Client client = mock(Client.class); - ThreadPool threadpool = new TestThreadPool("test"); - try { - when(client.threadPool()).thenReturn(threadpool); - doAnswer(invocationOnMock -> { - var request = (InferTrainedModelDeploymentAction.Request) invocationOnMock.getArguments()[1]; - assertThat(request.getUpdate(), instanceOf(TokenizationConfigUpdate.class)); - var update = (TokenizationConfigUpdate) request.getUpdate(); - assertEquals(update.getSpanSettings().span(), expectedSpan.get()); - assertEquals(update.getSpanSettings().maxSequenceLength(), expectedWindowSize.get()); - return null; - }).when(client) - .execute( - same(InferTrainedModelDeploymentAction.INSTANCE), - any(InferTrainedModelDeploymentAction.Request.class), - any(ActionListener.class) - ); - - var model = new MultilingualE5SmallModel( - "foo", - TaskType.TEXT_EMBEDDING, - "e5", - new MultilingualE5SmallInternalServiceSettings(1, 1, "cross-platform", null) - ); - var service = createService(client); - - expectedSpan.set(-1); - expectedWindowSize.set(null); - service.chunkedInfer( - model, - List.of("foo", "bar"), - Map.of(), - InputType.SEARCH, - null, - InferenceAction.Request.DEFAULT_TIMEOUT, - ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) - ); - - expectedSpan.set(-1); - expectedWindowSize.set(256); - service.chunkedInfer( - model, - List.of("foo", "bar"), - Map.of(), - InputType.SEARCH, - new ChunkingOptions(256, null), - InferenceAction.Request.DEFAULT_TIMEOUT, - ActionListener.wrap(r -> fail("unexpected result"), e -> fail(e.getMessage())) - ); - } finally { - terminate(threadpool); - } - } - public void testParsePersistedConfig_Rerank() { // with task settings { @@ -992,14 +1010,12 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { var inputs = randomList(1, 3, () -> randomAlphaOfLength(4)); var inputType = randomFrom(InputType.SEARCH, InputType.INGEST); var timeout = randomTimeValue(); - var chunk = randomBoolean(); var 
request = ElasticsearchInternalService.buildInferenceRequest( id, TextEmbeddingConfigUpdate.EMPTY_INSTANCE, inputs, inputType, - timeout, - chunk + timeout ); assertEquals(id, request.getId()); @@ -1009,7 +1025,7 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { request.getPrefixType() ); assertEquals(timeout, request.getInferenceTimeout()); - assertEquals(chunk, request.isChunked()); + assertEquals(false, request.isChunked()); } @SuppressWarnings("unchecked") @@ -1132,6 +1148,32 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { } } + public void testEmbeddingTypeFromTaskTypeAndSettings() { + assertEquals( + EmbeddingRequestChunker.EmbeddingType.SPARSE, + ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( + TaskType.SPARSE_EMBEDDING, + new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + ) + ); + assertEquals( + EmbeddingRequestChunker.EmbeddingType.FLOAT, + ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( + TaskType.TEXT_EMBEDDING, + new MultilingualE5SmallInternalServiceSettings(1, 1, "foo", null) + ) + ); + + var e = expectThrows( + ElasticsearchStatusException.class, + () -> ElasticsearchInternalService.embeddingTypeFromTaskTypeAndSettings( + TaskType.COMPLETION, + new ElasticsearchInternalServiceSettings(1, 1, "foo", null) + ) + ); + assertThat(e.getMessage(), containsString("Chunking is not supported for task type [completion]")); + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client, threadPool); return new ElasticsearchInternalService(context); From e065a3789be6939d8411811909f5e9370b418052 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 7 Oct 2024 15:07:59 -0400 Subject: [PATCH 19/85] [ML] Stream Cohere Completion (#114080) Implement and enable streaming for Cohere chat completions (v1). Includes processor for ND JSON streaming responses. 
Co-authored-by: Elastic Machine --- docs/changelog/114080.yaml | 5 + .../inference/common/DelegatingProcessor.java | 4 +- .../cohere/CohereResponseHandler.java | 23 ++- .../cohere/CohereStreamingProcessor.java | 101 ++++++++++ .../CohereCompletionRequestManager.java | 9 +- .../CohereEmbeddingsRequestManager.java | 2 +- .../sender/CohereRerankRequestManager.java | 2 +- .../completion/CohereCompletionRequest.java | 15 +- .../CohereCompletionRequestEntity.java | 8 +- .../NewlineDelimitedByteProcessor.java | 67 +++++++ .../services/cohere/CohereService.java | 6 + .../cohere/CohereResponseHandlerTests.java | 2 +- .../cohere/CohereStreamingProcessorTests.java | 189 ++++++++++++++++++ .../CohereCompletionRequestEntityTests.java | 8 +- .../cohere/CohereCompletionRequestTests.java | 8 +- .../NewlineDelimitedByteProcessorTests.java | 112 +++++++++++ .../services/cohere/CohereServiceTests.java | 50 +++++ 17 files changed, 585 insertions(+), 26 deletions(-) create mode 100644 docs/changelog/114080.yaml create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessor.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessor.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessorTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessorTests.java diff --git a/docs/changelog/114080.yaml b/docs/changelog/114080.yaml new file mode 100644 index 000000000000..395768c46369 --- /dev/null +++ b/docs/changelog/114080.yaml @@ -0,0 +1,5 @@ +pr: 114080 +summary: Stream Cohere Completion +area: Machine Learning +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java index 9af5668ecf75..fc2d890dd89e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/DelegatingProcessor.java @@ -21,7 +21,7 @@ import java.util.concurrent.atomic.AtomicLong; public abstract class DelegatingProcessor implements Flow.Processor { private static final Logger log = LogManager.getLogger(DelegatingProcessor.class); private final AtomicLong pendingRequests = new AtomicLong(); - private final AtomicBoolean isClosed = new AtomicBoolean(false); + protected final AtomicBoolean isClosed = new AtomicBoolean(false); private Flow.Subscriber downstream; private Flow.Subscription upstream; @@ -49,7 +49,7 @@ public abstract class DelegatingProcessor implements Flow.Processor @Override public void request(long n) { if (isClosed.get()) { - downstream.onComplete(); // shouldn't happen, but reinforce that we're no longer listening + downstream.onComplete(); } else if (upstream != null) { upstream.request(n); } else { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java index b5af0b474834..3579cd4100bb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java @@ -8,14 +8,19 @@ package org.elasticsearch.xpack.inference.external.cohere; import org.apache.logging.log4j.Logger; +import org.elasticsearch.inference.InferenceServiceResults; 
+import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.external.response.cohere.CohereErrorResponseEntity; +import org.elasticsearch.xpack.inference.external.response.streaming.NewlineDelimitedByteProcessor; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import java.util.concurrent.Flow; + import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; /** @@ -33,9 +38,11 @@ import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkFor public class CohereResponseHandler extends BaseResponseHandler { static final String TEXTS_ARRAY_TOO_LARGE_MESSAGE_MATCHER = "invalid request: total number of texts must be at most"; static final String TEXTS_ARRAY_ERROR_MESSAGE = "Received a texts array too large response"; + private final boolean canHandleStreamingResponse; - public CohereResponseHandler(String requestType, ResponseParser parseFunction) { + public CohereResponseHandler(String requestType, ResponseParser parseFunction, boolean canHandleStreamingResponse) { super(requestType, parseFunction, CohereErrorResponseEntity::fromResponse); + this.canHandleStreamingResponse = canHandleStreamingResponse; } @Override @@ -45,6 +52,20 @@ public class CohereResponseHandler extends BaseResponseHandler { checkForEmptyBody(throttlerManager, logger, request, result); } + @Override + public boolean canHandleStreamingResponses() { + return canHandleStreamingResponse; + } + + @Override + public InferenceServiceResults parseResult(Request request, Flow.Publisher flow) { + var ndProcessor 
= new NewlineDelimitedByteProcessor(); + var cohereProcessor = new CohereStreamingProcessor(); + flow.subscribe(ndProcessor); + ndProcessor.subscribe(cohereProcessor); + return new StreamingChatCompletionResults(cohereProcessor); + } + /** * Validates the status code throws an RetryException if not in the range [200, 300). * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessor.java new file mode 100644 index 000000000000..2516a647a91f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessor.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.cohere; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; +import org.elasticsearch.xpack.inference.common.DelegatingProcessor; + +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.Map; +import java.util.Optional; + +class CohereStreamingProcessor extends DelegatingProcessor, StreamingChatCompletionResults.Results> { + private static final Logger log = LogManager.getLogger(CohereStreamingProcessor.class); + + @Override + protected void next(Deque item) throws Exception { + if (item.isEmpty()) { + // discard empty result and go to the next + upstream().request(1); + return; + } + + var results = new ArrayDeque(item.size()); + for (String json : item) { + try (var jsonParser = jsonParser(json)) { + var responseMap = jsonParser.map(); + var eventType = (String) responseMap.get("event_type"); + switch (eventType) { + case "text-generation" -> parseText(responseMap).ifPresent(results::offer); + case "stream-end" -> validateResponse(responseMap); + case "stream-start", "search-queries-generation", "search-results", "citation-generation", "tool-calls-generation", + "tool-calls-chunk" -> { + log.debug("Skipping event type [{}] for line [{}].", eventType, item); + } + default -> throw new IOException("Unknown eventType found: " + eventType); + } + } catch (ElasticsearchStatusException e) { + throw e; + } catch (Exception e) { + log.warn("Failed to parse json from cohere: {}", json); + throw e; + } + } + + if (results.isEmpty()) { + 
upstream().request(1); + } else { + downstream().onNext(new StreamingChatCompletionResults.Results(results)); + } + } + + private static XContentParser jsonParser(String line) throws IOException { + return XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, line); + } + + private Optional parseText(Map responseMap) throws IOException { + var text = (String) responseMap.get("text"); + if (text != null) { + return Optional.of(new StreamingChatCompletionResults.Result(text)); + } else { + throw new IOException("Null text found in text-generation cohere event"); + } + } + + private void validateResponse(Map responseMap) { + var finishReason = (String) responseMap.get("finish_reason"); + switch (finishReason) { + case "ERROR", "ERROR_TOXIC" -> throw new ElasticsearchStatusException( + "Cohere stopped the stream due to an error: {}", + RestStatus.INTERNAL_SERVER_ERROR, + parseErrorMessage(responseMap) + ); + case "ERROR_LIMIT" -> throw new ElasticsearchStatusException( + "Cohere stopped the stream due to an error: {}", + RestStatus.TOO_MANY_REQUESTS, + parseErrorMessage(responseMap) + ); + } + } + + @SuppressWarnings("unchecked") + private String parseErrorMessage(Map responseMap) { + var innerResponseMap = (Map) responseMap.get("response"); + return (String) innerResponseMap.get("text"); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java index 423093a14a9f..ae46fbe0fef8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereCompletionRequestManager.java @@ -19,7 +19,6 @@ import 
org.elasticsearch.xpack.inference.external.request.cohere.completion.Cohe import org.elasticsearch.xpack.inference.external.response.cohere.CohereCompletionResponseEntity; import org.elasticsearch.xpack.inference.services.cohere.completion.CohereCompletionModel; -import java.util.List; import java.util.Objects; import java.util.function.Supplier; @@ -30,7 +29,7 @@ public class CohereCompletionRequestManager extends CohereRequestManager { private static final ResponseHandler HANDLER = createCompletionHandler(); private static ResponseHandler createCompletionHandler() { - return new CohereResponseHandler("cohere completion", CohereCompletionResponseEntity::fromResponse); + return new CohereResponseHandler("cohere completion", CohereCompletionResponseEntity::fromResponse, true); } public static CohereCompletionRequestManager of(CohereCompletionModel model, ThreadPool threadPool) { @@ -51,8 +50,10 @@ public class CohereCompletionRequestManager extends CohereRequestManager { Supplier hasRequestCompletedFunction, ActionListener listener ) { - List docsInput = DocumentsOnlyInput.of(inferenceInputs).getInputs(); - CohereCompletionRequest request = new CohereCompletionRequest(docsInput, model); + var docsOnly = DocumentsOnlyInput.of(inferenceInputs); + var docsInput = docsOnly.getInputs(); + var stream = docsOnly.stream(); + CohereCompletionRequest request = new CohereCompletionRequest(docsInput, model, stream); execute(new ExecutableInferenceRequest(requestSender, logger, request, HANDLER, hasRequestCompletedFunction, listener)); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java index 402f91a0838d..80617ea56e63 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java 
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereEmbeddingsRequestManager.java @@ -28,7 +28,7 @@ public class CohereEmbeddingsRequestManager extends CohereRequestManager { private static final ResponseHandler HANDLER = createEmbeddingsHandler(); private static ResponseHandler createEmbeddingsHandler() { - return new CohereResponseHandler("cohere text embedding", CohereEmbeddingsResponseEntity::fromResponse); + return new CohereResponseHandler("cohere text embedding", CohereEmbeddingsResponseEntity::fromResponse, false); } public static CohereEmbeddingsRequestManager of(CohereEmbeddingsModel model, ThreadPool threadPool) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java index 9d565e7124b0..d27812b17399 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/CohereRerankRequestManager.java @@ -27,7 +27,7 @@ public class CohereRerankRequestManager extends CohereRequestManager { private static final ResponseHandler HANDLER = createCohereResponseHandler(); private static ResponseHandler createCohereResponseHandler() { - return new CohereResponseHandler("cohere rerank", (request, response) -> CohereRankedResponseEntity.fromResponse(response)); + return new CohereResponseHandler("cohere rerank", (request, response) -> CohereRankedResponseEntity.fromResponse(response), false); } public static CohereRerankRequestManager of(CohereRerankModel model, ThreadPool threadPool) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequest.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequest.java index f68f919a7d85..2172dcd4d791 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequest.java @@ -25,22 +25,20 @@ import java.util.List; import java.util.Objects; public class CohereCompletionRequest extends CohereRequest { - private final CohereAccount account; - private final List input; - private final String modelId; - private final String inferenceEntityId; + private final boolean stream; - public CohereCompletionRequest(List input, CohereCompletionModel model) { + public CohereCompletionRequest(List input, CohereCompletionModel model, boolean stream) { Objects.requireNonNull(model); this.account = CohereAccount.of(model, CohereCompletionRequest::buildDefaultUri); this.input = Objects.requireNonNull(input); this.modelId = model.getServiceSettings().modelId(); this.inferenceEntityId = model.getInferenceEntityId(); + this.stream = stream; } @Override @@ -48,7 +46,7 @@ public class CohereCompletionRequest extends CohereRequest { HttpPost httpPost = new HttpPost(account.uri()); ByteArrayEntity byteEntity = new ByteArrayEntity( - Strings.toString(new CohereCompletionRequestEntity(input, modelId)).getBytes(StandardCharsets.UTF_8) + Strings.toString(new CohereCompletionRequestEntity(input, modelId, isStreaming())).getBytes(StandardCharsets.UTF_8) ); httpPost.setEntity(byteEntity); @@ -62,6 +60,11 @@ public class CohereCompletionRequest extends CohereRequest { return inferenceEntityId; } + @Override + public boolean isStreaming() { + return stream; + } + @Override public URI getURI() { return account.uri(); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequestEntity.java index 8cb3dc6e3c8e..b834e4335d73 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/completion/CohereCompletionRequestEntity.java @@ -15,11 +15,11 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -public record CohereCompletionRequestEntity(List input, @Nullable String model) implements ToXContentObject { +public record CohereCompletionRequestEntity(List input, @Nullable String model, boolean stream) implements ToXContentObject { private static final String MESSAGE_FIELD = "message"; - private static final String MODEL = "model"; + private static final String STREAM = "stream"; public CohereCompletionRequestEntity { Objects.requireNonNull(input); @@ -36,6 +36,10 @@ public record CohereCompletionRequestEntity(List input, @Nullable String builder.field(MODEL, model); } + if (stream) { + builder.field(STREAM, true); + } + builder.endObject(); return builder; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessor.java new file mode 100644 index 000000000000..7c44b202a816 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessor.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.streaming; + +import org.elasticsearch.xpack.inference.common.DelegatingProcessor; +import org.elasticsearch.xpack.inference.external.http.HttpResult; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.regex.Pattern; + +/** + * Processes HttpResult bytes into lines separated by newlines, delimited by either line-feed or carriage-return line-feed. + * Downstream is responsible for validating the structure of the lines after they have been separated. + * Because Upstream (Apache) can send us a single line split between two HttpResults, this processor will aggregate bytes from the last + * HttpResult and append them to the front of the next HttpResult. + * When onComplete is called, the last batch is always flushed to the downstream onNext. 
+ */ +public class NewlineDelimitedByteProcessor extends DelegatingProcessor> { + private static final Pattern END_OF_LINE_REGEX = Pattern.compile("\\n|\\r\\n"); + private volatile String previousTokens = ""; + + @Override + protected void next(HttpResult item) { + // discard empty result and go to the next + if (item.isBodyEmpty()) { + upstream().request(1); + return; + } + + var body = previousTokens + new String(item.body(), StandardCharsets.UTF_8); + var lines = END_OF_LINE_REGEX.split(body, -1); // -1 because we actually want trailing empty strings + + var results = new ArrayDeque(lines.length); + for (var i = 0; i < lines.length - 1; i++) { + var line = lines[i].trim(); + if (line.isBlank() == false) { + results.offer(line); + } + } + + previousTokens = lines[lines.length - 1].trim(); + + if (results.isEmpty()) { + upstream().request(1); + } else { + downstream().onNext(results); + } + } + + @Override + public void onComplete() { + if (previousTokens.isBlank()) { + super.onComplete(); + } else if (isClosed.compareAndSet(false, true)) { + var results = new ArrayDeque(1); + results.offer(previousTokens); + downstream().onNext(results); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 728a4ac137df..3ba93dd8d1b6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankMode import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException; import static 
org.elasticsearch.xpack.inference.services.ServiceUtils.parsePersistedConfigErrorMsg; @@ -288,4 +289,9 @@ public class CohereService extends SenderService { public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ML_INFERENCE_RATE_LIMIT_SETTINGS_ADDED; } + + @Override + public Set supportedStreamingTasks() { + return COMPLETION_ONLY; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java index d64ac495c8c9..444415dfc8e4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandlerTests.java @@ -132,7 +132,7 @@ public class CohereResponseHandlerTests extends ESTestCase { var mockRequest = mock(Request.class); when(mockRequest.getInferenceEntityId()).thenReturn(modelId); var httpResult = new HttpResult(httpResponse, errorMessage == null ? new byte[] {} : responseJson.getBytes(StandardCharsets.UTF_8)); - var handler = new CohereResponseHandler("", (request, result) -> null); + var handler = new CohereResponseHandler("", (request, result) -> null, false); handler.checkForFailureStatusCode(mockRequest, httpResult); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessorTests.java new file mode 100644 index 000000000000..87d6d63bb8c5 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/cohere/CohereStreamingProcessorTests.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.cohere; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xpack.core.inference.results.StreamingChatCompletionResults; + +import java.io.IOException; +import java.util.ArrayDeque; +import java.util.concurrent.Flow; +import java.util.function.Consumer; + +import static org.elasticsearch.xpack.inference.common.DelegatingProcessorTests.onError; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isA; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class CohereStreamingProcessorTests extends ESTestCase { + + public void testParseErrorCallsOnError() { + var item = new ArrayDeque(); + item.offer("this is not json"); + + var exception = onError(new CohereStreamingProcessor(), item); + assertThat(exception, instanceOf(XContentParseException.class)); + } + + public void testUnrecognizedEventCallsOnError() { + var item = new ArrayDeque(); + item.offer("{\"event_type\":\"test\"}"); + + var exception = onError(new CohereStreamingProcessor(), item); + assertThat(exception, instanceOf(IOException.class)); + assertThat(exception.getMessage(), equalTo("Unknown eventType found: test")); + } + + public void testMissingTextCallsOnError() { + var item = new ArrayDeque(); + item.offer("{\"event_type\":\"text-generation\"}"); + + var 
exception = onError(new CohereStreamingProcessor(), item); + assertThat(exception, instanceOf(IOException.class)); + assertThat(exception.getMessage(), equalTo("Null text found in text-generation cohere event")); + } + + public void testEmptyResultsRequestsMoreData() throws Exception { + var emptyDeque = new ArrayDeque(); + + var processor = new CohereStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + + processor.next(emptyDeque); + + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + public void testNonDataEventsAreSkipped() throws Exception { + var item = new ArrayDeque(); + item.offer("{\"event_type\":\"stream-start\"}"); + item.offer("{\"event_type\":\"search-queries-generation\"}"); + item.offer("{\"event_type\":\"search-results\"}"); + item.offer("{\"event_type\":\"citation-generation\"}"); + item.offer("{\"event_type\":\"tool-calls-generation\"}"); + item.offer("{\"event_type\":\"tool-calls-chunk\"}"); + + var processor = new CohereStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + + processor.next(item); + + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + public void testParseError() { + var json = "{\"event_type\":\"stream-end\", \"finish_reason\":\"ERROR\", \"response\":{ \"text\": \"a wild error appears\" }}"; + testError(json, e -> { + assertThat(e.status().getStatus(), equalTo(500)); + assertThat(e.getMessage(), containsString("a wild error appears")); + }); + } + + private void testError(String json, Consumer test) { + var item = new ArrayDeque(); + item.offer(json); + + var processor = new CohereStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription 
upstream = mock(); + processor.onSubscribe(upstream); + + try { + processor.next(item); + fail("Expected an exception to be thrown"); + } catch (ElasticsearchStatusException e) { + test.accept(e); + } catch (Exception e) { + fail(e, "Expected an exception of type ElasticsearchStatusException to be thrown"); + } + } + + public void testParseToxic() { + var json = "{\"event_type\":\"stream-end\", \"finish_reason\":\"ERROR_TOXIC\", \"response\":{ \"text\": \"by britney spears\" }}"; + testError(json, e -> { + assertThat(e.status().getStatus(), equalTo(500)); + assertThat(e.getMessage(), containsString("by britney spears")); + }); + } + + public void testParseLimit() { + var json = "{\"event_type\":\"stream-end\", \"finish_reason\":\"ERROR_LIMIT\", \"response\":{ \"text\": \"over the limit\" }}"; + testError(json, e -> { + assertThat(e.status().getStatus(), equalTo(429)); + assertThat(e.getMessage(), containsString("over the limit")); + }); + } + + public void testNonErrorFinishesAreSkipped() throws Exception { + var item = new ArrayDeque(); + item.offer("{\"event_type\":\"stream-end\", \"finish_reason\":\"COMPLETE\"}"); + item.offer("{\"event_type\":\"stream-end\", \"finish_reason\":\"STOP_SEQUENCE\"}"); + item.offer("{\"event_type\":\"stream-end\", \"finish_reason\":\"USER_CANCEL\"}"); + item.offer("{\"event_type\":\"stream-end\", \"finish_reason\":\"MAX_TOKENS\"}"); + + var processor = new CohereStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + processor.subscribe(downstream); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + + processor.next(item); + + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + public void testParseCohereData() throws Exception { + var item = new ArrayDeque(); + item.offer("{\"event_type\":\"text-generation\", \"text\":\"hello there\"}"); + + var processor = new CohereStreamingProcessor(); + + Flow.Subscriber downstream = mock(); + 
processor.subscribe(downstream); + + Flow.Subscription upstream = mock(); + processor.onSubscribe(upstream); + + processor.next(item); + + verify(upstream, times(0)).request(1); + verify(downstream, times(1)).onNext(assertArg(chunks -> { + assertThat(chunks, isA(StreamingChatCompletionResults.Results.class)); + var results = (StreamingChatCompletionResults.Results) chunks; + assertThat(results.results().size(), equalTo(1)); + assertThat(results.results().getFirst().delta(), equalTo("hello there")); + })); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestEntityTests.java index dbe6a9438d88..c3b534f42e7e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestEntityTests.java @@ -22,7 +22,7 @@ import static org.hamcrest.CoreMatchers.is; public class CohereCompletionRequestEntityTests extends ESTestCase { public void testXContent_WritesAllFields() throws IOException { - var entity = new CohereCompletionRequestEntity(List.of("some input"), "model"); + var entity = new CohereCompletionRequestEntity(List.of("some input"), "model", false); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); @@ -33,7 +33,7 @@ public class CohereCompletionRequestEntityTests extends ESTestCase { } public void testXContent_DoesNotWriteModelIfNotSpecified() throws IOException { - var entity = new CohereCompletionRequestEntity(List.of("some input"), null); + var entity = new CohereCompletionRequestEntity(List.of("some input"), null, false); XContentBuilder builder = 
XContentFactory.contentBuilder(XContentType.JSON); entity.toXContent(builder, null); @@ -44,10 +44,10 @@ public class CohereCompletionRequestEntityTests extends ESTestCase { } public void testXContent_ThrowsIfInputIsNull() { - expectThrows(NullPointerException.class, () -> new CohereCompletionRequestEntity(null, null)); + expectThrows(NullPointerException.class, () -> new CohereCompletionRequestEntity(null, null, false)); } public void testXContent_ThrowsIfMessageInInputIsNull() { - expectThrows(NullPointerException.class, () -> new CohereCompletionRequestEntity(List.of((String) null), null)); + expectThrows(NullPointerException.class, () -> new CohereCompletionRequestEntity(List.of((String) null), null, false)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestTests.java index d6d0d5c00eaf..f2e6d4305f9e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereCompletionRequestTests.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.sameInstance; public class CohereCompletionRequestTests extends ESTestCase { public void testCreateRequest_UrlDefined() throws IOException { - var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", null)); + var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", null), false); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -43,7 +43,7 @@ public class CohereCompletionRequestTests extends ESTestCase { } public void 
testCreateRequest_ModelDefined() throws IOException { - var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model")); + var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model"), false); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); @@ -60,14 +60,14 @@ public class CohereCompletionRequestTests extends ESTestCase { } public void testTruncate_ReturnsSameInstance() { - var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model")); + var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model"), false); var truncatedRequest = request.truncate(); assertThat(truncatedRequest, sameInstance(request)); } public void testTruncationInfo_ReturnsNull() { - var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model")); + var request = new CohereCompletionRequest(List.of("abc"), CohereCompletionModelTests.createModel("url", "secret", "model"), false); assertNull(request.getTruncationInfo()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessorTests.java new file mode 100644 index 000000000000..488cbccd0e7c --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/streaming/NewlineDelimitedByteProcessorTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.streaming; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.junit.Before; +import org.mockito.ArgumentCaptor; + +import java.nio.charset.StandardCharsets; +import java.util.Deque; +import java.util.concurrent.Flow; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.assertArg; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; + +public class NewlineDelimitedByteProcessorTests extends ESTestCase { + private Flow.Subscription upstream; + private Flow.Subscriber> downstream; + private NewlineDelimitedByteProcessor processor; + + @Before + public void setUp() throws Exception { + super.setUp(); + upstream = mock(); + downstream = mock(); + processor = new NewlineDelimitedByteProcessor(); + processor.onSubscribe(upstream); + processor.subscribe(downstream); + } + + public void testEmptyBody() { + processor.next(result(null)); + processor.onComplete(); + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + private HttpResult result(String response) { + return new HttpResult(mock(), response == null ? 
new byte[0] : response.getBytes(StandardCharsets.UTF_8)); + } + + public void testEmptyParseResponse() { + processor.next(result("")); + verify(upstream, times(1)).request(1); + verify(downstream, times(0)).onNext(any()); + } + + public void testValidResponse() { + processor.next(result("{\"hello\":\"there\"}\n")); + verify(downstream, times(1)).onNext(assertArg(deque -> { + assertThat(deque, notNullValue()); + assertThat(deque.size(), is(1)); + assertThat(deque.getFirst(), is("{\"hello\":\"there\"}")); + })); + } + + public void testMultipleValidResponse() { + processor.next(result(""" + {"value": 1} + {"value": 2} + {"value": 3} + """)); + verify(upstream, times(0)).request(1); + verify(downstream, times(1)).onNext(assertArg(deque -> { + assertThat(deque, notNullValue()); + assertThat(deque.size(), is(3)); + var items = deque.iterator(); + IntStream.range(1, 4).forEach(i -> { + assertThat(items.hasNext(), is(true)); + assertThat(items.next(), containsString(String.valueOf(i))); + }); + })); + } + + public void testOnCompleteFlushesResponse() { + processor.next(result(""" + {"value": 1}""")); + + // onNext should not be called with only one value + verify(downstream, times(0)).onNext(any()); + verify(downstream, times(0)).onComplete(); + + // onComplete should flush the value pending, and onNext should be called + processor.onComplete(); + verify(downstream, times(1)).onNext(assertArg(deque -> { + assertThat(deque, notNullValue()); + assertThat(deque.size(), is(1)); + var item = deque.getFirst(); + assertThat(item, containsString(String.valueOf(1))); + })); + verify(downstream, times(0)).onComplete(); + + // next time the downstream requests data, onComplete is called + var downstreamSubscription = ArgumentCaptor.forClass(Flow.Subscription.class); + verify(downstream).onSubscribe(downstreamSubscription.capture()); + downstreamSubscription.getValue().request(1); + verify(downstream, times(1)).onComplete(); + } +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 22503108b526..420a635963a2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -38,6 +38,8 @@ import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSender; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.services.InferenceEventsAssertion; +import org.elasticsearch.xpack.inference.services.cohere.completion.CohereCompletionModelTests; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModelTests; @@ -1349,6 +1351,54 @@ public class CohereServiceTests extends ESTestCase { assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity()); } + public void testInfer_StreamRequest() throws Exception { + String responseJson = """ + {"event_type":"text-generation", "text":"hello"} + {"event_type":"text-generation", "text":"there"} + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var result = streamChatCompletion(); + + InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoErrors().hasEvent(""" + {"completion":[{"delta":"hello"},{"delta":"there"}]}"""); + } + + private InferenceServiceResults streamChatCompletion() throws IOException { + var 
senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { + var model = CohereCompletionModelTests.createModel(getUrl(webServer), "secret", "model"); + PlainActionFuture listener = new PlainActionFuture<>(); + service.infer( + model, + null, + List.of("abc"), + true, + new HashMap<>(), + InputType.INGEST, + InferenceAction.Request.DEFAULT_TIMEOUT, + listener + ); + + return listener.actionGet(TIMEOUT); + } + } + + public void testInfer_StreamRequest_ErrorResponse() throws Exception { + String responseJson = """ + { "event_type":"stream-end", "finish_reason":"ERROR", "response":{ "text": "how dare you" } } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var result = streamChatCompletion(); + + InferenceEventsAssertion.assertThat(result) + .hasFinishedStream() + .hasNoEvents() + .hasErrorWithStatusCode(500) + .hasErrorContaining("how dare you"); + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, From 7753c5216a5195890c3d4a72dcb7c6f9056c8eaa Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 7 Oct 2024 15:10:36 -0400 Subject: [PATCH 20/85] [ML] Add Streaming Inference spec (#113812) API for `/_inference/{task_type}/{inference_id}/_stream` and `/_inference/{inference_id}/_stream` Request is `application/json` Response is `text/event-stream` --- docs/changelog/113812.yaml | 5 ++ .../api/inference.stream_inference.json | 49 +++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 docs/changelog/113812.yaml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json diff --git a/docs/changelog/113812.yaml b/docs/changelog/113812.yaml new file mode 100644 index 000000000000..04498b4ae5f7 --- /dev/null +++ b/docs/changelog/113812.yaml @@ -0,0 +1,5 @@ +pr: 113812 +summary: Add Streaming Inference spec +area: Machine 
Learning +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json new file mode 100644 index 000000000000..32b4b2f31183 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.stream_inference.json @@ -0,0 +1,49 @@ +{ + "inference.stream_inference":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/post-stream-inference-api.html", + "description":"Perform streaming inference" + }, + "stability":"experimental", + "visibility":"public", + "headers":{ + "accept": [ "text/event-stream"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_inference/{inference_id}/_stream", + "methods":[ + "POST" + ], + "parts":{ + "inference_id":{ + "type":"string", + "description":"The inference Id" + } + } + }, + { + "path":"/_inference/{task_type}/{inference_id}/_stream", + "methods":[ + "POST" + ], + "parts":{ + "task_type":{ + "type":"string", + "description":"The task type" + }, + "inference_id":{ + "type":"string", + "description":"The inference Id" + } + } + } + ] + }, + "body":{ + "description":"The inference payload" + } + } +} From e7cd5c9dea0e200af9f2bfb791e41ad555150846 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 7 Oct 2024 16:31:05 -0300 Subject: [PATCH 21/85] Add postal_code support to the City and Enterprise databases (#114193) --- .../org/elasticsearch/ingest/geoip/Database.java | 9 ++++++--- .../ingest/geoip/MaxmindIpDataLookups.java | 13 +++++++++++++ .../ingest/geoip/GeoIpProcessorFactoryTests.java | 3 ++- .../ingest/geoip/GeoIpProcessorTests.java | 6 ++++-- .../ingest/geoip/MaxMindSupportTests.java | 6 +++--- 5 files changed, 28 insertions(+), 9 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java 
b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index 31d7a43e3869..10817c920e1e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -40,7 +40,8 @@ enum Database { Property.REGION_NAME, Property.CITY_NAME, Property.TIMEZONE, - Property.LOCATION + Property.LOCATION, + Property.POSTAL_CODE ), Set.of( Property.COUNTRY_ISO_CODE, @@ -108,7 +109,8 @@ enum Database { Property.MOBILE_COUNTRY_CODE, Property.MOBILE_NETWORK_CODE, Property.USER_TYPE, - Property.CONNECTION_TYPE + Property.CONNECTION_TYPE, + Property.POSTAL_CODE ), Set.of( Property.COUNTRY_ISO_CODE, @@ -228,7 +230,8 @@ enum Database { MOBILE_NETWORK_CODE, CONNECTION_TYPE, USER_TYPE, - TYPE; + TYPE, + POSTAL_CODE; /** * Parses a string representation of a property into an actual Property instance. Not all properties that exist are diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java index 5b22b3f4005a..2e0d4a031a07 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java @@ -23,6 +23,7 @@ import com.maxmind.geoip2.model.EnterpriseResponse; import com.maxmind.geoip2.model.IspResponse; import com.maxmind.geoip2.record.Continent; import com.maxmind.geoip2.record.Location; +import com.maxmind.geoip2.record.Postal; import com.maxmind.geoip2.record.Subdivision; import org.elasticsearch.common.network.InetAddresses; @@ -139,6 +140,7 @@ final class MaxmindIpDataLookups { Location location = response.getLocation(); Continent continent = response.getContinent(); Subdivision subdivision = response.getMostSpecificSubdivision(); + Postal postal = response.getPostal(); Map data = 
new HashMap<>(); for (Database.Property property : this.properties) { @@ -206,6 +208,11 @@ final class MaxmindIpDataLookups { data.put("location", locationObject); } } + case POSTAL_CODE -> { + if (postal != null && postal.getCode() != null) { + data.put("postal_code", postal.getCode()); + } + } } } return data; @@ -324,6 +331,7 @@ final class MaxmindIpDataLookups { Location location = response.getLocation(); Continent continent = response.getContinent(); Subdivision subdivision = response.getMostSpecificSubdivision(); + Postal postal = response.getPostal(); Long asn = response.getTraits().getAutonomousSystemNumber(); String organizationName = response.getTraits().getAutonomousSystemOrganization(); @@ -413,6 +421,11 @@ final class MaxmindIpDataLookups { data.put("location", locationObject); } } + case POSTAL_CODE -> { + if (postal != null && postal.getCode() != null) { + data.put("postal_code", postal.getCode()); + } + } case ASN -> { if (asn != null) { data.put("asn", asn); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 9972db26b364..d4017268b53d 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -274,7 +274,8 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { e.getMessage(), equalTo( "[properties] illegal property value [invalid]. 
valid values are [IP, COUNTRY_ISO_CODE, " - + "COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]" + + "COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, " + + "LOCATION, POSTAL_CODE]" ) ); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 46024cb6ad21..3fb082f33b3f 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -222,7 +222,7 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(10)); + assertThat(geoData.size(), equalTo(11)); assertThat(geoData.get("ip"), equalTo(ip)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); @@ -233,6 +233,7 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("city_name"), equalTo("Homestead")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); assertThat(geoData.get("location"), equalTo(Map.of("lat", 25.4573d, "lon", -80.4572d))); + assertThat(geoData.get("postal_code"), equalTo("33035")); } public void testCityWithMissingLocation() throws Exception { @@ -470,7 +471,7 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(24)); + assertThat(geoData.size(), equalTo(25)); assertThat(geoData.get("ip"), equalTo(ip)); 
assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); @@ -481,6 +482,7 @@ public class GeoIpProcessorTests extends ESTestCase { assertThat(geoData.get("city_name"), equalTo("Chatham")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); assertThat(geoData.get("location"), equalTo(Map.of("lat", 42.3478, "lon", -73.5549))); + assertThat(geoData.get("postal_code"), equalTo("12037")); assertThat(geoData.get("asn"), equalTo(14671L)); assertThat(geoData.get("organization_name"), equalTo("FairPoint Communications")); assertThat(geoData.get("network"), equalTo("74.209.16.0/20")); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java index 3b1200363778..7a3de6ca199a 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java @@ -84,7 +84,8 @@ public class MaxMindSupportTests extends ESTestCase { "location.longitude", "location.timeZone", "mostSpecificSubdivision.isoCode", - "mostSpecificSubdivision.name" + "mostSpecificSubdivision.name", + "postal.code" ); private static final Set CITY_UNSUPPORTED_FIELDS = Set.of( "city.confidence", @@ -109,7 +110,6 @@ public class MaxMindSupportTests extends ESTestCase { "mostSpecificSubdivision.confidence", "mostSpecificSubdivision.geoNameId", "mostSpecificSubdivision.names", - "postal.code", "postal.confidence", "registeredCountry.confidence", "registeredCountry.geoNameId", @@ -223,6 +223,7 @@ public class MaxMindSupportTests extends ESTestCase { "location.timeZone", "mostSpecificSubdivision.isoCode", "mostSpecificSubdivision.name", + "postal.code", "traits.anonymous", "traits.anonymousVpn", "traits.autonomousSystemNumber", @@ -263,7 +264,6 @@ public class 
MaxMindSupportTests extends ESTestCase { "mostSpecificSubdivision.confidence", "mostSpecificSubdivision.geoNameId", "mostSpecificSubdivision.names", - "postal.code", "postal.confidence", "registeredCountry.confidence", "registeredCountry.geoNameId", From 3595956d7ee7bfd5272def08ce419bd998e3b2f5 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 8 Oct 2024 08:06:42 +1100 Subject: [PATCH 22/85] Mute org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests #114266 --- muted-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index f205c9ce44a0..93893d7103af 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -368,6 +368,8 @@ tests: - class: org.elasticsearch.logsdb.datageneration.DataGeneratorTests method: testDataGeneratorProducesValidMappingAndDocument issue: https://github.com/elastic/elasticsearch/issues/114188 +- class: org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests + issue: https://github.com/elastic/elasticsearch/issues/114266 # Examples: # From 7bbebbd37d7510e32a8b50ffd501bd0ba09e6d56 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Mon, 7 Oct 2024 20:24:12 -0500 Subject: [PATCH 23/85] Supporting more maxmind fields in the geoip processor (#114268) --- .../elasticsearch/ingest/geoip/Database.java | 27 +++++++++-- .../ingest/geoip/MaxmindIpDataLookups.java | 48 +++++++++++++++++++ .../geoip/GeoIpProcessorFactoryTests.java | 6 +-- .../ingest/geoip/GeoIpProcessorTests.java | 17 +++++-- .../ingest/geoip/MaxMindSupportTests.java | 16 +++---- 5 files changed, 95 insertions(+), 19 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index 10817c920e1e..128c16e16376 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java 
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -32,6 +32,7 @@ enum Database { City( Set.of( Property.IP, + Property.COUNTRY_IN_EUROPEAN_UNION, Property.COUNTRY_ISO_CODE, Property.CONTINENT_CODE, Property.COUNTRY_NAME, @@ -41,7 +42,8 @@ enum Database { Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION, - Property.POSTAL_CODE + Property.POSTAL_CODE, + Property.ACCURACY_RADIUS ), Set.of( Property.COUNTRY_ISO_CODE, @@ -54,7 +56,14 @@ enum Database { ) ), Country( - Set.of(Property.IP, Property.CONTINENT_CODE, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE), + Set.of( + Property.IP, + Property.CONTINENT_CODE, + Property.CONTINENT_NAME, + Property.COUNTRY_NAME, + Property.COUNTRY_IN_EUROPEAN_UNION, + Property.COUNTRY_ISO_CODE + ), Set.of(Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE) ), Asn( @@ -85,12 +94,15 @@ enum Database { Enterprise( Set.of( Property.IP, + Property.COUNTRY_CONFIDENCE, + Property.COUNTRY_IN_EUROPEAN_UNION, Property.COUNTRY_ISO_CODE, Property.COUNTRY_NAME, Property.CONTINENT_CODE, Property.CONTINENT_NAME, Property.REGION_ISO_CODE, Property.REGION_NAME, + Property.CITY_CONFIDENCE, Property.CITY_NAME, Property.TIMEZONE, Property.LOCATION, @@ -110,7 +122,9 @@ enum Database { Property.MOBILE_NETWORK_CODE, Property.USER_TYPE, Property.CONNECTION_TYPE, - Property.POSTAL_CODE + Property.POSTAL_CODE, + Property.POSTAL_CONFIDENCE, + Property.ACCURACY_RADIUS ), Set.of( Property.COUNTRY_ISO_CODE, @@ -205,12 +219,15 @@ enum Database { enum Property { IP, + COUNTRY_CONFIDENCE, + COUNTRY_IN_EUROPEAN_UNION, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, + CITY_CONFIDENCE, CITY_NAME, TIMEZONE, LOCATION, @@ -231,7 +248,9 @@ enum Database { CONNECTION_TYPE, USER_TYPE, TYPE, - POSTAL_CODE; + POSTAL_CODE, + POSTAL_CONFIDENCE, + ACCURACY_RADIUS; /** * Parses a string representation of a property into an actual Property 
instance. Not all properties that exist are diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java index 2e0d4a031a07..e7c348193803 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/MaxmindIpDataLookups.java @@ -146,6 +146,12 @@ final class MaxmindIpDataLookups { for (Database.Property property : this.properties) { switch (property) { case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_IN_EUROPEAN_UNION -> { + if (country.getIsoCode() != null) { + // isInEuropeanUnion is a boolean so it can't be null. But it really only makes sense if we have a country + data.put("country_in_european_union", country.isInEuropeanUnion()); + } + } case COUNTRY_ISO_CODE -> { String countryIsoCode = country.getIsoCode(); if (countryIsoCode != null) { @@ -208,6 +214,12 @@ final class MaxmindIpDataLookups { data.put("location", locationObject); } } + case ACCURACY_RADIUS -> { + Integer accuracyRadius = location.getAccuracyRadius(); + if (accuracyRadius != null) { + data.put("accuracy_radius", accuracyRadius); + } + } case POSTAL_CODE -> { if (postal != null && postal.getCode() != null) { data.put("postal_code", postal.getCode()); @@ -261,6 +273,12 @@ final class MaxmindIpDataLookups { for (Database.Property property : this.properties) { switch (property) { case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_IN_EUROPEAN_UNION -> { + if (country.getIsoCode() != null) { + // isInEuropeanUnion is a boolean so it can't be null. 
But it really only makes sense if we have a country + data.put("country_in_european_union", country.isInEuropeanUnion()); + } + } case COUNTRY_ISO_CODE -> { String countryIsoCode = country.getIsoCode(); if (countryIsoCode != null) { @@ -359,6 +377,18 @@ final class MaxmindIpDataLookups { for (Database.Property property : this.properties) { switch (property) { case IP -> data.put("ip", response.getTraits().getIpAddress()); + case COUNTRY_CONFIDENCE -> { + Integer countryConfidence = country.getConfidence(); + if (countryConfidence != null) { + data.put("country_confidence", countryConfidence); + } + } + case COUNTRY_IN_EUROPEAN_UNION -> { + if (country.getIsoCode() != null) { + // isInEuropeanUnion is a boolean so it can't be null. But it really only makes sense if we have a country + data.put("country_in_european_union", country.isInEuropeanUnion()); + } + } case COUNTRY_ISO_CODE -> { String countryIsoCode = country.getIsoCode(); if (countryIsoCode != null) { @@ -399,6 +429,12 @@ final class MaxmindIpDataLookups { data.put("region_name", subdivisionName); } } + case CITY_CONFIDENCE -> { + Integer cityConfidence = city.getConfidence(); + if (cityConfidence != null) { + data.put("city_confidence", cityConfidence); + } + } case CITY_NAME -> { String cityName = city.getName(); if (cityName != null) { @@ -421,11 +457,23 @@ final class MaxmindIpDataLookups { data.put("location", locationObject); } } + case ACCURACY_RADIUS -> { + Integer accuracyRadius = location.getAccuracyRadius(); + if (accuracyRadius != null) { + data.put("accuracy_radius", accuracyRadius); + } + } case POSTAL_CODE -> { if (postal != null && postal.getCode() != null) { data.put("postal_code", postal.getCode()); } } + case POSTAL_CONFIDENCE -> { + Integer postalConfidence = postal.getConfidence(); + if (postalConfidence != null) { + data.put("postal_confidence", postalConfidence); + } + } case ASN -> { if (asn != null) { data.put("asn", asn); diff --git 
a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index d4017268b53d..cfea54d2520b 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -195,7 +195,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { equalTo( "[properties] illegal property value [" + asnProperty - + "]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME]" + + "]. valid values are [IP, COUNTRY_IN_EUROPEAN_UNION, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME]" ) ); } @@ -273,9 +273,9 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { assertThat( e.getMessage(), equalTo( - "[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + "[properties] illegal property value [invalid]. 
valid values are [IP, COUNTRY_IN_EUROPEAN_UNION, COUNTRY_ISO_CODE, " + "COUNTRY_NAME, CONTINENT_CODE, CONTINENT_NAME, REGION_ISO_CODE, REGION_NAME, CITY_NAME, TIMEZONE, " - + "LOCATION, POSTAL_CODE]" + + "LOCATION, POSTAL_CODE, ACCURACY_RADIUS]" ) ); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 3fb082f33b3f..ffc40324bd88 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -106,8 +106,9 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(7)); + assertThat(geoData.size(), equalTo(9)); assertThat(geoData.get("ip"), equalTo(ip)); + assertThat(geoData.get("country_in_european_union"), equalTo(false)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); assertThat(geoData.get("continent_code"), equalTo("NA")); @@ -222,8 +223,9 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(11)); + assertThat(geoData.size(), equalTo(13)); assertThat(geoData.get("ip"), equalTo(ip)); + assertThat(geoData.get("country_in_european_union"), equalTo(false)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); assertThat(geoData.get("continent_code"), equalTo("NA")); @@ -233,6 +235,7 @@ public class GeoIpProcessorTests extends ESTestCase { 
assertThat(geoData.get("city_name"), equalTo("Homestead")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); assertThat(geoData.get("location"), equalTo(Map.of("lat", 25.4573d, "lon", -80.4572d))); + assertThat(geoData.get("accuracy_radius"), equalTo(50)); assertThat(geoData.get("postal_code"), equalTo("33035")); } @@ -288,8 +291,9 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(5)); + assertThat(geoData.size(), equalTo(6)); assertThat(geoData.get("ip"), equalTo(ip)); + assertThat(geoData.get("country_in_european_union"), equalTo(true)); assertThat(geoData.get("country_iso_code"), equalTo("NL")); assertThat(geoData.get("country_name"), equalTo("Netherlands")); assertThat(geoData.get("continent_code"), equalTo("EU")); @@ -471,18 +475,23 @@ public class GeoIpProcessorTests extends ESTestCase { @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); assertThat(geoData, notNullValue()); - assertThat(geoData.size(), equalTo(25)); + assertThat(geoData.size(), equalTo(30)); assertThat(geoData.get("ip"), equalTo(ip)); + assertThat(geoData.get("country_confidence"), equalTo(99)); + assertThat(geoData.get("country_in_european_union"), equalTo(false)); assertThat(geoData.get("country_iso_code"), equalTo("US")); assertThat(geoData.get("country_name"), equalTo("United States")); assertThat(geoData.get("continent_code"), equalTo("NA")); assertThat(geoData.get("continent_name"), equalTo("North America")); assertThat(geoData.get("region_iso_code"), equalTo("US-NY")); assertThat(geoData.get("region_name"), equalTo("New York")); + assertThat(geoData.get("city_confidence"), equalTo(11)); assertThat(geoData.get("city_name"), equalTo("Chatham")); assertThat(geoData.get("timezone"), equalTo("America/New_York")); 
assertThat(geoData.get("location"), equalTo(Map.of("lat", 42.3478, "lon", -73.5549))); + assertThat(geoData.get("accuracy_radius"), equalTo(27)); assertThat(geoData.get("postal_code"), equalTo("12037")); + assertThat(geoData.get("city_confidence"), equalTo(11)); assertThat(geoData.get("asn"), equalTo(14671L)); assertThat(geoData.get("organization_name"), equalTo("FairPoint Communications")); assertThat(geoData.get("network"), equalTo("74.209.16.0/20")); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java index 7a3de6ca199a..1e05cf2b3ba3 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java @@ -78,8 +78,10 @@ public class MaxMindSupportTests extends ESTestCase { "city.name", "continent.code", "continent.name", + "country.inEuropeanUnion", "country.isoCode", "country.name", + "location.accuracyRadius", "location.latitude", "location.longitude", "location.timeZone", @@ -95,14 +97,12 @@ public class MaxMindSupportTests extends ESTestCase { "continent.names", "country.confidence", "country.geoNameId", - "country.inEuropeanUnion", "country.names", "leastSpecificSubdivision.confidence", "leastSpecificSubdivision.geoNameId", "leastSpecificSubdivision.isoCode", "leastSpecificSubdivision.name", "leastSpecificSubdivision.names", - "location.accuracyRadius", "location.averageIncome", "location.metroCode", "location.populationDensity", @@ -159,6 +159,7 @@ public class MaxMindSupportTests extends ESTestCase { private static final Set COUNTRY_SUPPORTED_FIELDS = Set.of( "continent.name", + "country.inEuropeanUnion", "country.isoCode", "continent.code", "country.name" @@ -168,7 +169,6 @@ public class MaxMindSupportTests extends ESTestCase { "continent.names", "country.confidence", 
"country.geoNameId", - "country.inEuropeanUnion", "country.names", "maxMind", "registeredCountry.confidence", @@ -213,17 +213,22 @@ public class MaxMindSupportTests extends ESTestCase { private static final Set DOMAIN_UNSUPPORTED_FIELDS = Set.of("ipAddress", "network"); private static final Set ENTERPRISE_SUPPORTED_FIELDS = Set.of( + "city.confidence", "city.name", "continent.code", "continent.name", + "country.confidence", + "country.inEuropeanUnion", "country.isoCode", "country.name", + "location.accuracyRadius", "location.latitude", "location.longitude", "location.timeZone", "mostSpecificSubdivision.isoCode", "mostSpecificSubdivision.name", "postal.code", + "postal.confidence", "traits.anonymous", "traits.anonymousVpn", "traits.autonomousSystemNumber", @@ -242,21 +247,17 @@ public class MaxMindSupportTests extends ESTestCase { "traits.userType" ); private static final Set ENTERPRISE_UNSUPPORTED_FIELDS = Set.of( - "city.confidence", "city.geoNameId", "city.names", "continent.geoNameId", "continent.names", - "country.confidence", "country.geoNameId", - "country.inEuropeanUnion", "country.names", "leastSpecificSubdivision.confidence", "leastSpecificSubdivision.geoNameId", "leastSpecificSubdivision.isoCode", "leastSpecificSubdivision.name", "leastSpecificSubdivision.names", - "location.accuracyRadius", "location.averageIncome", "location.metroCode", "location.populationDensity", @@ -264,7 +265,6 @@ public class MaxMindSupportTests extends ESTestCase { "mostSpecificSubdivision.confidence", "mostSpecificSubdivision.geoNameId", "mostSpecificSubdivision.names", - "postal.confidence", "registeredCountry.confidence", "registeredCountry.geoNameId", "registeredCountry.inEuropeanUnion", From 44f379189925e9738873196f7c86788085ab0f98 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 8 Oct 2024 08:27:06 +0300 Subject: [PATCH 24/85] Updating toXContent implementation for retrievers (#114017) --- .../search/retriever/RetrieverBuilder.java | 8 +++ 
.../builder/SearchSourceBuilderTests.java | 71 +++++++++++++++++++ .../KnnRetrieverBuilderParsingTests.java | 2 +- .../StandardRetrieverBuilderParsingTests.java | 2 +- .../random/RandomRankRetrieverBuilder.java | 5 +- .../TextSimilarityRankRetrieverBuilder.java | 8 +-- .../RandomRankRetrieverBuilderTests.java | 10 ++- ...xtSimilarityRankRetrieverBuilderTests.java | 46 +++++++++++- .../xpack/rank/rrf/RRFRetrieverBuilder.java | 3 - .../rrf/RRFRetrieverBuilderParsingTests.java | 57 ++++++++++++++- 10 files changed, 187 insertions(+), 25 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java index 1328106896bc..1c6f8c4a7ce4 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/RetrieverBuilder.java @@ -251,11 +251,19 @@ public abstract class RetrieverBuilder implements Rewriteable, @Override public final XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); + builder.startObject(getName()); if (preFilterQueryBuilders.isEmpty() == false) { builder.field(PRE_FILTER_FIELD.getPreferredName(), preFilterQueryBuilders); } + if (minScore != null) { + builder.field(MIN_SCORE_FIELD.getPreferredName(), minScore); + } + if (retrieverName != null) { + builder.field(NAME_FIELD.getPreferredName(), retrieverName); + } doToXContent(builder, params); builder.endObject(); + builder.endObject(); return builder; } diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 3f33bbfe6f6c..240a677f4cbf 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -41,6 +41,8 @@ import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.collapse.CollapseBuilderTests; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.rescore.QueryRescorerBuilder; +import org.elasticsearch.search.retriever.KnnRetrieverBuilder; +import org.elasticsearch.search.retriever.StandardRetrieverBuilder; import org.elasticsearch.search.slice.SliceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder; @@ -600,6 +602,75 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase { } } + public void testStandardRetrieverParsing() throws IOException { + String restContent = "{" + + " \"retriever\": {" + + " \"standard\": {" + + " \"query\": {" + + " \"match_all\": {}" + + " }," + + " \"min_score\": 10," + + " \"_name\": \"foo_standard\"" + + " }" + + " }" + + "}"; + SearchUsageHolder searchUsageHolder = new UsageService().getSearchUsageHolder(); + try (XContentParser jsonParser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder source = new SearchSourceBuilder().parseXContent(jsonParser, true, searchUsageHolder, nf -> true); + assertThat(source.retriever(), instanceOf(StandardRetrieverBuilder.class)); + StandardRetrieverBuilder parsed = (StandardRetrieverBuilder) source.retriever(); + assertThat(parsed.minScore(), equalTo(10f)); + assertThat(parsed.retrieverName(), equalTo("foo_standard")); + try (XContentParser parseSerialized = createParser(JsonXContent.jsonXContent, Strings.toString(source))) { + SearchSourceBuilder deserializedSource = new SearchSourceBuilder().parseXContent( + parseSerialized, + true, + searchUsageHolder, + nf -> true + ); + assertThat(deserializedSource.retriever(), instanceOf(StandardRetrieverBuilder.class)); + StandardRetrieverBuilder deserialized = 
(StandardRetrieverBuilder) source.retriever(); + assertThat(parsed, equalTo(deserialized)); + } + } + } + + public void testKnnRetrieverParsing() throws IOException { + String restContent = "{" + + " \"retriever\": {" + + " \"knn\": {" + + " \"query_vector\": [" + + " 3" + + " ]," + + " \"field\": \"vector\"," + + " \"k\": 10," + + " \"num_candidates\": 15," + + " \"min_score\": 10," + + " \"_name\": \"foo_knn\"" + + " }" + + " }" + + "}"; + SearchUsageHolder searchUsageHolder = new UsageService().getSearchUsageHolder(); + try (XContentParser jsonParser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder source = new SearchSourceBuilder().parseXContent(jsonParser, true, searchUsageHolder, nf -> true); + assertThat(source.retriever(), instanceOf(KnnRetrieverBuilder.class)); + KnnRetrieverBuilder parsed = (KnnRetrieverBuilder) source.retriever(); + assertThat(parsed.minScore(), equalTo(10f)); + assertThat(parsed.retrieverName(), equalTo("foo_knn")); + try (XContentParser parseSerialized = createParser(JsonXContent.jsonXContent, Strings.toString(source))) { + SearchSourceBuilder deserializedSource = new SearchSourceBuilder().parseXContent( + parseSerialized, + true, + searchUsageHolder, + nf -> true + ); + assertThat(deserializedSource.retriever(), instanceOf(KnnRetrieverBuilder.class)); + KnnRetrieverBuilder deserialized = (KnnRetrieverBuilder) source.retriever(); + assertThat(parsed, equalTo(deserialized)); + } + } + } + public void testStoredFieldsUsage() throws IOException { Set storedFieldRestVariations = Set.of( "{\"stored_fields\" : [\"_none_\"]}", diff --git a/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java b/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java index f3dd86e0b1fa..b0bf7e663649 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/retriever/KnnRetrieverBuilderParsingTests.java @@ -74,7 +74,7 @@ public class KnnRetrieverBuilderParsingTests extends AbstractXContentTestCase TextSimilarityRankRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c) + new ParseField(RandomRankBuilder.NAME), + (p, c) -> RandomRankRetrieverBuilder.PARSER.apply(p, (RetrieverParserContext) c) ) ); return new NamedXContentRegistry(entries); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java index 1a72cb0da289..140b181a42a0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -25,6 +26,8 @@ import org.elasticsearch.search.retriever.TestRetrieverBuilder; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.usage.SearchUsage; +import org.elasticsearch.usage.SearchUsageHolder; +import org.elasticsearch.usage.UsageService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -72,8 +75,8 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes } @Override - 
protected TextSimilarityRankRetrieverBuilder doParseInstance(XContentParser parser) { - return TextSimilarityRankRetrieverBuilder.PARSER.apply( + protected TextSimilarityRankRetrieverBuilder doParseInstance(XContentParser parser) throws IOException { + return (TextSimilarityRankRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( parser, new RetrieverParserContext( new SearchUsage(), @@ -208,6 +211,45 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes } } + public void testTextSimilarityRetrieverParsing() throws IOException { + String restContent = "{" + + " \"retriever\": {" + + " \"text_similarity_reranker\": {" + + " \"retriever\": {" + + " \"test\": {" + + " \"value\": \"my-test-retriever\"" + + " }" + + " }," + + " \"field\": \"my-field\"," + + " \"inference_id\": \"my-inference-id\"," + + " \"inference_text\": \"my-inference-text\"," + + " \"rank_window_size\": 100," + + " \"min_score\": 20.0," + + " \"_name\": \"foo_reranker\"" + + " }" + + " }" + + "}"; + SearchUsageHolder searchUsageHolder = new UsageService().getSearchUsageHolder(); + try (XContentParser jsonParser = createParser(JsonXContent.jsonXContent, restContent)) { + SearchSourceBuilder source = new SearchSourceBuilder().parseXContent(jsonParser, true, searchUsageHolder, nf -> true); + assertThat(source.retriever(), instanceOf(TextSimilarityRankRetrieverBuilder.class)); + TextSimilarityRankRetrieverBuilder parsed = (TextSimilarityRankRetrieverBuilder) source.retriever(); + assertThat(parsed.minScore(), equalTo(20f)); + assertThat(parsed.retrieverName(), equalTo("foo_reranker")); + try (XContentParser parseSerialized = createParser(JsonXContent.jsonXContent, Strings.toString(source))) { + SearchSourceBuilder deserializedSource = new SearchSourceBuilder().parseXContent( + parseSerialized, + true, + searchUsageHolder, + nf -> true + ); + assertThat(deserializedSource.retriever(), instanceOf(TextSimilarityRankRetrieverBuilder.class)); + 
TextSimilarityRankRetrieverBuilder deserialized = (TextSimilarityRankRetrieverBuilder) source.retriever(); + assertThat(parsed, equalTo(deserialized)); + } + } + } + public void testIsCompound() { RetrieverBuilder compoundInnerRetriever = new TestRetrieverBuilder(ESTestCase.randomAlphaOfLengthBetween(5, 10)) { @Override diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java index 496af9957443..5f19e361d857 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java @@ -180,10 +180,7 @@ public final class RRFRetrieverBuilder extends CompoundRetrieverBuilder { /** @@ -53,7 +61,10 @@ public class RRFRetrieverBuilderParsingTests extends AbstractXContentTestCase true)); + return (RRFRetrieverBuilder) RetrieverBuilder.parseTopLevelRetrieverBuilder( + parser, + new RetrieverParserContext(new SearchUsage(), nf -> true) + ); } @Override @@ -81,4 +92,48 @@ public class RRFRetrieverBuilderParsingTests extends AbstractXContentTestCase true); + assertThat(source.retriever(), instanceOf(RRFRetrieverBuilder.class)); + RRFRetrieverBuilder parsed = (RRFRetrieverBuilder) source.retriever(); + assertThat(parsed.minScore(), equalTo(20f)); + assertThat(parsed.retrieverName(), equalTo("foo_rrf")); + try (XContentParser parseSerialized = createParser(JsonXContent.jsonXContent, Strings.toString(source))) { + SearchSourceBuilder deserializedSource = new SearchSourceBuilder().parseXContent( + parseSerialized, + true, + searchUsageHolder, + nf -> true + ); + assertThat(deserializedSource.retriever(), instanceOf(RRFRetrieverBuilder.class)); + RRFRetrieverBuilder deserialized = (RRFRetrieverBuilder) source.retriever(); + assertThat(parsed, equalTo(deserialized)); + } + } + } } From 
740cb2e0c78baf0a746563864674cbb2f0ff75af Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 8 Oct 2024 06:59:30 +0100 Subject: [PATCH 25/85] Document that `?wait_for_active_shards=0` is permitted (#114091) Today the docs for the `?wait_for_active_shards` parameter say that it must be a positive integer, proscribing `0`, yet `0` is a legitimate value for this parameter. This commit fixes this point and rewords the docs slightly for clarity. --- docs/reference/rest-api/common-parms.asciidoc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index fabd495cdc52..993bb8cb894f 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1298,10 +1298,11 @@ tag::wait_for_active_shards[] `wait_for_active_shards`:: + -- -(Optional, string) The number of shard copies that must be active before -proceeding with the operation. Set to `all` or any positive integer up -to the total number of shards in the index (`number_of_replicas+1`). -Default: 1, the primary shard. +(Optional, string) The number of copies of each shard that must be active +before proceeding with the operation. Set to `all` or any non-negative integer +up to the total number of copies of each shard in the index +(`number_of_replicas+1`). Defaults to `1`, meaning to wait just for each +primary shard to be active. See <>. 
-- From 1c40954037596995911dc42701f9e2f9fb230dd2 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Tue, 8 Oct 2024 17:01:42 +1100 Subject: [PATCH 26/85] Publish APM metrics from the Azure BlobStore (#113913) Closes ES-9550 --- .../AzureBlobStoreRepositoryMetricsTests.java | 468 ++++++++++++++++++ .../azure/AzureBlobStoreRepositoryTests.java | 119 ++++- .../repositories/azure/AzureBlobStore.java | 149 +++--- .../azure/AzureClientProvider.java | 135 ++++- .../repositories/azure/AzureRepository.java | 8 +- .../azure/AzureRepositoryPlugin.java | 10 +- .../azure/AzureStorageService.java | 4 +- .../azure/AbstractAzureServerTestCase.java | 6 +- .../azure/AzureClientProviderTests.java | 6 +- .../azure/AzureRepositorySettingsTests.java | 4 +- .../repositories/s3/S3BlobStore.java | 12 +- .../repositories/RepositoriesMetrics.java | 78 +++ .../org/elasticsearch/rest/RestStatus.java | 12 + 13 files changed, 913 insertions(+), 98 deletions(-) create mode 100644 modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java new file mode 100644 index 000000000000..a9bf0afa37e1 --- /dev/null +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -0,0 +1,468 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.repositories.azure; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.repositories.RepositoriesMetrics; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.repositories.blobstore.RequestedRangeNotSatisfiedException; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.junit.After; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.elasticsearch.repositories.azure.AbstractAzureServerTestCase.randomBlobContent; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; 
+import static org.hamcrest.Matchers.lessThanOrEqualTo; + +@SuppressForbidden(reason = "we use a HttpServer to emulate Azure") +public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreRepositoryTests { + + private static final Predicate GET_BLOB_REQUEST_PREDICATE = request -> GET_BLOB_PATTERN.test( + request.getRequestMethod() + " " + request.getRequestURI() + ); + private static final int MAX_RETRIES = 3; + + private final Queue requestHandlers = new ConcurrentLinkedQueue<>(); + + @Override + protected Map createHttpHandlers() { + Map httpHandlers = super.createHttpHandlers(); + assert httpHandlers.size() == 1 : "This assumes there's a single handler"; + return httpHandlers.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> new ResponseInjectingAzureHttpHandler(requestHandlers, e.getValue()))); + } + + /** + * We want to control the errors in this test + */ + @Override + protected HttpHandler createErroneousHttpHandler(HttpHandler delegate) { + return delegate; + } + + @After + public void checkRequestHandlerQueue() { + if (requestHandlers.isEmpty() == false) { + fail("There were unused request handlers left in the queue, this is probably a broken test"); + } + } + + private static BlobContainer getBlobContainer(String dataNodeName, String repository) { + final var blobStoreRepository = (BlobStoreRepository) internalCluster().getInstance(RepositoriesService.class, dataNodeName) + .repository(repository); + return blobStoreRepository.blobStore().blobContainer(BlobPath.EMPTY.add(randomIdentifier())); + } + + public void testThrottleResponsesAreCountedInMetrics() throws IOException { + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + + // Create a blob + final String blobName = "index-" + randomIdentifier(); + final OperationPurpose 
purpose = randomFrom(OperationPurpose.values()); + blobContainer.writeBlob(purpose, blobName, BytesReference.fromByteBuffer(ByteBuffer.wrap(randomBlobContent())), false); + clearMetrics(dataNodeName); + + // Queue up some throttle responses + final int numThrottles = randomIntBetween(1, MAX_RETRIES); + IntStream.range(0, numThrottles).forEach(i -> requestHandlers.offer(new FixedRequestHandler(RestStatus.TOO_MANY_REQUESTS))); + + // Check that the blob exists + blobContainer.blobExists(purpose, blobName); + + // Correct metrics are recorded + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() + .withRequests(numThrottles + 1) + .withThrottles(numThrottles) + .withExceptions(numThrottles) + .forResult(MetricsAsserter.Result.Success); + } + + public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + + // Create a blob + final String blobName = "index-" + randomIdentifier(); + final OperationPurpose purpose = randomFrom(OperationPurpose.values()); + blobContainer.writeBlob(purpose, blobName, BytesReference.fromByteBuffer(ByteBuffer.wrap(randomBlobContent())), false); + clearMetrics(dataNodeName); + + // Queue up a range-not-satisfied error + requestHandlers.offer(new FixedRequestHandler(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, null, GET_BLOB_REQUEST_PREDICATE)); + + // Attempt to read the blob + assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName)); + + // Correct metrics are recorded + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics() + .withRequests(1) + .withThrottles(0) + .withExceptions(1) + 
.forResult(MetricsAsserter.Result.RangeNotSatisfied); + } + + public void testErrorResponsesAreCountedInMetrics() throws IOException { + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + + // Create a blob + final String blobName = "index-" + randomIdentifier(); + final OperationPurpose purpose = randomFrom(OperationPurpose.values()); + blobContainer.writeBlob(purpose, blobName, BytesReference.fromByteBuffer(ByteBuffer.wrap(randomBlobContent())), false); + clearMetrics(dataNodeName); + + // Queue some retry-able error responses + final int numErrors = randomIntBetween(1, MAX_RETRIES); + final AtomicInteger throttles = new AtomicInteger(); + IntStream.range(0, numErrors).forEach(i -> { + RestStatus status = randomFrom(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.TOO_MANY_REQUESTS, RestStatus.SERVICE_UNAVAILABLE); + if (status == RestStatus.TOO_MANY_REQUESTS) { + throttles.incrementAndGet(); + } + requestHandlers.offer(new FixedRequestHandler(status)); + }); + + // Check that the blob exists + blobContainer.blobExists(purpose, blobName); + + // Correct metrics are recorded + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() + .withRequests(numErrors + 1) + .withThrottles(throttles.get()) + .withExceptions(numErrors) + .forResult(MetricsAsserter.Result.Success); + } + + public void testRequestFailuresAreCountedInMetrics() { + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + clearMetrics(dataNodeName); + + // Repeatedly cause a connection error to exhaust retries + IntStream.range(0, MAX_RETRIES + 1).forEach(i -> 
requestHandlers.offer((exchange, delegate) -> exchange.close())); + + // Hit the API + OperationPurpose purpose = randomFrom(OperationPurpose.values()); + assertThrows(IOException.class, () -> blobContainer.listBlobs(purpose)); + + // Correct metrics are recorded + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, repository).expectMetrics() + .withRequests(4) + .withThrottles(0) + .withExceptions(4) + .forResult(MetricsAsserter.Result.Exception); + } + + public void testRequestTimeIsAccurate() throws IOException { + final String repository = createRepository(randomRepositoryName()); + final String dataNodeName = internalCluster().getNodeNameThat(DiscoveryNode::canContainData); + final BlobContainer blobContainer = getBlobContainer(dataNodeName, repository); + clearMetrics(dataNodeName); + + AtomicLong totalDelayMillis = new AtomicLong(0); + // Add some artificial delays + IntStream.range(0, randomIntBetween(1, MAX_RETRIES)).forEach(i -> { + long thisDelay = randomLongBetween(10, 100); + totalDelayMillis.addAndGet(thisDelay); + requestHandlers.offer((exchange, delegate) -> { + safeSleep(thisDelay); + // return a retry-able error + exchange.sendResponseHeaders(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), -1); + }); + }); + + // Hit the API + final long startTimeMillis = System.currentTimeMillis(); + blobContainer.listBlobs(randomFrom(OperationPurpose.values())); + final long elapsedTimeMillis = System.currentTimeMillis() - startTimeMillis; + + List longHistogramMeasurement = getTelemetryPlugin(dataNodeName).getLongHistogramMeasurement( + RepositoriesMetrics.HTTP_REQUEST_TIME_IN_MILLIS_HISTOGRAM + ); + long recordedRequestTime = longHistogramMeasurement.get(0).getLong(); + // Request time should be >= the delays we simulated + assertThat(recordedRequestTime, greaterThanOrEqualTo(totalDelayMillis.get())); + // And <= the elapsed time for the request + assertThat(recordedRequestTime, lessThanOrEqualTo(elapsedTimeMillis)); + } + + private void 
clearMetrics(String discoveryNode) { + internalCluster().getInstance(PluginsService.class, discoveryNode) + .filterPlugins(TestTelemetryPlugin.class) + .forEach(TestTelemetryPlugin::resetMeter); + } + + private MetricsAsserter metricsAsserter( + String dataNodeName, + OperationPurpose operationPurpose, + AzureBlobStore.Operation operation, + String repository + ) { + return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository); + } + + private class MetricsAsserter { + private final String dataNodeName; + private final OperationPurpose purpose; + private final AzureBlobStore.Operation operation; + private final String repository; + + enum Result { + Success, + Failure, + RangeNotSatisfied, + Exception + } + + enum MetricType { + LongHistogram { + @Override + List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name) { + return testTelemetryPlugin.getLongHistogramMeasurement(name); + } + }, + LongCounter { + @Override + List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name) { + return testTelemetryPlugin.getLongCounterMeasurement(name); + } + }; + + abstract List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name); + } + + private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) { + this.dataNodeName = dataNodeName; + this.purpose = purpose; + this.operation = operation; + this.repository = repository; + } + + private class Expectations { + private int expectedRequests; + private int expectedThrottles; + private int expectedExceptions; + + public Expectations withRequests(int expectedRequests) { + this.expectedRequests = expectedRequests; + return this; + } + + public Expectations withThrottles(int expectedThrottles) { + this.expectedThrottles = expectedThrottles; + return this; + } + + public Expectations withExceptions(int expectedExceptions) { + this.expectedExceptions = expectedExceptions; + return this; + } + + public 
void forResult(Result result) { + assertMetricsRecorded(expectedRequests, expectedThrottles, expectedExceptions, result); + } + } + + Expectations expectMetrics() { + return new Expectations(); + } + + private void assertMetricsRecorded(int expectedRequests, int expectedThrottles, int expectedExceptions, Result result) { + assertIntMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_OPERATIONS_TOTAL, 1); + assertIntMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_REQUESTS_TOTAL, expectedRequests); + + if (expectedThrottles > 0) { + assertIntMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_THROTTLES_TOTAL, expectedThrottles); + assertIntMetricRecorded(MetricType.LongHistogram, RepositoriesMetrics.METRIC_THROTTLES_HISTOGRAM, expectedThrottles); + } else { + assertNoMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_THROTTLES_TOTAL); + assertNoMetricRecorded(MetricType.LongHistogram, RepositoriesMetrics.METRIC_THROTTLES_HISTOGRAM); + } + + if (expectedExceptions > 0) { + assertIntMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_EXCEPTIONS_TOTAL, expectedExceptions); + assertIntMetricRecorded(MetricType.LongHistogram, RepositoriesMetrics.METRIC_EXCEPTIONS_HISTOGRAM, expectedExceptions); + } else { + assertNoMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_EXCEPTIONS_TOTAL); + assertNoMetricRecorded(MetricType.LongHistogram, RepositoriesMetrics.METRIC_EXCEPTIONS_HISTOGRAM); + } + + if (result == Result.RangeNotSatisfied || result == Result.Failure || result == Result.Exception) { + assertIntMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL, 1); + } else { + assertNoMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL); + } + + if (result == Result.RangeNotSatisfied) { + assertIntMetricRecorded(MetricType.LongCounter, 
RepositoriesMetrics.METRIC_EXCEPTIONS_REQUEST_RANGE_NOT_SATISFIED_TOTAL, 1); + } else { + assertNoMetricRecorded(MetricType.LongCounter, RepositoriesMetrics.METRIC_EXCEPTIONS_REQUEST_RANGE_NOT_SATISFIED_TOTAL); + } + + assertMatchingMetricRecorded( + MetricType.LongHistogram, + RepositoriesMetrics.HTTP_REQUEST_TIME_IN_MILLIS_HISTOGRAM, + m -> assertThat("No request time metric found", m.getLong(), greaterThanOrEqualTo(0L)) + ); + } + + private void assertIntMetricRecorded(MetricType metricType, String metricName, int expectedValue) { + assertMatchingMetricRecorded( + metricType, + metricName, + measurement -> assertEquals("Unexpected value for " + metricType + " " + metricName, expectedValue, measurement.getLong()) + ); + } + + private void assertNoMetricRecorded(MetricType metricType, String metricName) { + assertThat( + "Expected no values for " + metricType + " " + metricName, + metricType.getMeasurements(getTelemetryPlugin(dataNodeName), metricName), + hasSize(0) + ); + } + + private void assertMatchingMetricRecorded(MetricType metricType, String metricName, Consumer assertion) { + List measurements = metricType.getMeasurements(getTelemetryPlugin(dataNodeName), metricName); + Measurement measurement = measurements.stream() + .filter( + m -> m.attributes().get("operation").equals(operation.getKey()) + && m.attributes().get("purpose").equals(purpose.getKey()) + && m.attributes().get("repo_name").equals(repository) + && m.attributes().get("repo_type").equals("azure") + ) + .findFirst() + .orElseThrow( + () -> new IllegalStateException( + "No metric found with name=" + + metricName + + " and operation=" + + operation.getKey() + + " and purpose=" + + purpose.getKey() + + " and repo_name=" + + repository + + " in " + + measurements + ) + ); + + assertion.accept(measurement); + } + } + + @SuppressForbidden(reason = "we use a HttpServer to emulate Azure") + private static class ResponseInjectingAzureHttpHandler implements DelegatingHttpHandler { + + private final 
HttpHandler delegate; + private final Queue requestHandlerQueue; + + ResponseInjectingAzureHttpHandler(Queue requestHandlerQueue, HttpHandler delegate) { + this.delegate = delegate; + this.requestHandlerQueue = requestHandlerQueue; + } + + @Override + public void handle(HttpExchange exchange) throws IOException { + RequestHandler nextHandler = requestHandlerQueue.peek(); + if (nextHandler != null && nextHandler.matchesRequest(exchange)) { + requestHandlerQueue.poll().writeResponse(exchange, delegate); + } else { + delegate.handle(exchange); + } + } + + @Override + public HttpHandler getDelegate() { + return delegate; + } + } + + @SuppressForbidden(reason = "we use a HttpServer to emulate Azure") + @FunctionalInterface + private interface RequestHandler { + void writeResponse(HttpExchange exchange, HttpHandler delegate) throws IOException; + + default boolean matchesRequest(HttpExchange exchange) { + return true; + } + } + + @SuppressForbidden(reason = "we use a HttpServer to emulate Azure") + private static class FixedRequestHandler implements RequestHandler { + + private final RestStatus status; + private final String responseBody; + private final Predicate requestMatcher; + + FixedRequestHandler(RestStatus status) { + this(status, null, req -> true); + } + + /** + * Create a handler that only gets executed for requests that match the supplied predicate. Note + * that because the errors are stored in a queue this will prevent any subsequently queued errors from + * being returned until after it returns. 
+ */ + FixedRequestHandler(RestStatus status, String responseBody, Predicate requestMatcher) { + this.status = status; + this.responseBody = responseBody; + this.requestMatcher = requestMatcher; + } + + @Override + public boolean matchesRequest(HttpExchange exchange) { + return requestMatcher.test(exchange); + } + + @Override + public void writeResponse(HttpExchange exchange, HttpHandler delegateHandler) throws IOException { + if (responseBody != null) { + byte[] responseBytes = responseBody.getBytes(StandardCharsets.UTF_8); + exchange.sendResponseHeaders(status.getStatus(), responseBytes.length); + exchange.getResponseBody().write(responseBytes); + } else { + exchange.sendResponseHeaders(status.getStatus(), -1); + } + } + } +} diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 1b7628cc0ad8..473d91da6e34 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -16,11 +16,13 @@ import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.MockSecureSettings; @@ -30,8 
+32,15 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.TestTelemetryPlugin; +import org.elasticsearch.test.BackgroundIndexer; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -41,22 +50,33 @@ import java.util.ArrayList; import java.util.Base64; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.atomic.LongAdder; import java.util.function.Predicate; import java.util.regex.Pattern; +import java.util.stream.Collectors; +import static org.elasticsearch.repositories.RepositoriesMetrics.METRIC_OPERATIONS_TOTAL; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; @SuppressForbidden(reason = "this test uses a HttpServer to emulate an Azure endpoint") public class 
AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryIntegTestCase { - private static final String DEFAULT_ACCOUNT_NAME = "account"; + protected static final String DEFAULT_ACCOUNT_NAME = "account"; + protected static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); + protected static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); @Override protected String repositoryType() { @@ -78,7 +98,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg @Override protected Collection> nodePlugins() { - return Collections.singletonList(TestAzureRepositoryPlugin.class); + return List.of(TestAzureRepositoryPlugin.class, TestTelemetryPlugin.class); } @Override @@ -91,7 +111,7 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg @Override protected HttpHandler createErroneousHttpHandler(final HttpHandler delegate) { - return new AzureErroneousHttpHandler(delegate, AzureStorageSettings.DEFAULT_MAX_RETRIES); + return new AzureHTTPStatsCollectorHandler(new AzureErroneousHttpHandler(delegate, AzureStorageSettings.DEFAULT_MAX_RETRIES)); } @Override @@ -119,6 +139,13 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg .build(); } + protected TestTelemetryPlugin getTelemetryPlugin(String dataNodeName) { + return internalCluster().getInstance(PluginsService.class, dataNodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + } + /** * AzureRepositoryPlugin that allows to set low values for the Azure's client retry policy * and for BlobRequestOptions#getSingleBlobPutThresholdInBytes(). 
@@ -195,9 +222,6 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg */ @SuppressForbidden(reason = "this test uses a HttpServer to emulate an Azure endpoint") private static class AzureHTTPStatsCollectorHandler extends HttpStatsCollectorHandler { - private static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); - private static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); - private final Set seenRequestIds = ConcurrentCollections.newConcurrentSet(); private AzureHTTPStatsCollectorHandler(HttpHandler delegate) { @@ -303,4 +327,87 @@ public class AzureBlobStoreRepositoryTests extends ESMockAPIBasedRepositoryInteg container.delete(randomPurpose()); } } + + public void testMetrics() throws Exception { + // Reset all the metrics so there's none lingering from previous tests + internalCluster().getInstances(PluginsService.class) + .forEach(ps -> ps.filterPlugins(TestTelemetryPlugin.class).forEach(TestTelemetryPlugin::resetMeter)); + + // Create the repository and perform some activities + final String repository = createRepository(randomRepositoryName(), false); + final String index = "index-no-merges"; + createIndex(index, 1, 0); + + final long nbDocs = randomLongBetween(10_000L, 20_000L); + try (BackgroundIndexer indexer = new BackgroundIndexer(index, client(), (int) nbDocs)) { + waitForDocs(nbDocs, indexer); + } + flushAndRefresh(index); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + + final String snapshot = "snapshot"; + assertSuccessfulSnapshot( + clusterAdmin().prepareCreateSnapshot(TEST_REQUEST_TIMEOUT, repository, snapshot).setWaitForCompletion(true).setIndices(index) + ); + 
assertAcked(client().admin().indices().prepareDelete(index)); + assertSuccessfulRestore( + clusterAdmin().prepareRestoreSnapshot(TEST_REQUEST_TIMEOUT, repository, snapshot).setWaitForCompletion(true) + ); + ensureGreen(index); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertAcked(clusterAdmin().prepareDeleteSnapshot(TEST_REQUEST_TIMEOUT, repository, snapshot).get()); + + final Map aggregatedMetrics = new HashMap<>(); + // Compare collected stats and metrics for each node and they should be the same + for (var nodeName : internalCluster().getNodeNames()) { + final BlobStoreRepository blobStoreRepository; + try { + blobStoreRepository = (BlobStoreRepository) internalCluster().getInstance(RepositoriesService.class, nodeName) + .repository(repository); + } catch (RepositoryMissingException e) { + continue; + } + + final AzureBlobStore blobStore = (AzureBlobStore) blobStoreRepository.blobStore(); + final Map statsCollectors = blobStore.getMetricsRecorder().opsCounters; + + final List metrics = Measurement.combine( + getTelemetryPlugin(nodeName).getLongCounterMeasurement(METRIC_OPERATIONS_TOTAL) + ); + + assertThat( + statsCollectors.keySet().stream().map(AzureBlobStore.StatsKey::operation).collect(Collectors.toSet()), + equalTo( + metrics.stream() + .map(m -> AzureBlobStore.Operation.fromKey((String) m.attributes().get("operation"))) + .collect(Collectors.toSet()) + ) + ); + metrics.forEach(metric -> { + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); + final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation")); + final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey( + operation, + OperationPurpose.parse((String) metric.attributes().get("purpose")) + ); + assertThat(nodeName + "/" + statsKey + " exists", statsCollectors, hasKey(statsKey)); + 
assertThat(nodeName + "/" + statsKey + " has correct sum", metric.getLong(), equalTo(statsCollectors.get(statsKey).sum())); + aggregatedMetrics.compute(statsKey.operation(), (k, v) -> v == null ? metric.getLong() : v + metric.getLong()); + }); + } + + // Metrics number should be consistent with server side request count as well. + assertThat(aggregatedMetrics, equalTo(getServerMetrics())); + } + + private Map getServerMetrics() { + return getMockRequestCounts().entrySet() + .stream() + .collect(Collectors.toMap(e -> AzureBlobStore.Operation.fromKey(e.getKey()), Map.Entry::getValue)); + } } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index 546698908212..d520d30f2bac 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -60,6 +60,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; +import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.azure.AzureRepository.Repository; import org.elasticsearch.repositories.blobstore.ChunkedBlobOutputStream; import org.elasticsearch.rest.RestStatus; @@ -86,11 +87,11 @@ import java.util.Objects; import java.util.Spliterator; import java.util.Spliterators; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.function.BiPredicate; -import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -102,59 +103,54 @@ 
public class AzureBlobStore implements BlobStore { private static final int DEFAULT_UPLOAD_BUFFERS_SIZE = (int) new ByteSizeValue(64, ByteSizeUnit.KB).getBytes(); private final AzureStorageService service; - private final BigArrays bigArrays; + private final RepositoryMetadata repositoryMetadata; private final String clientName; private final String container; private final LocationMode locationMode; private final ByteSizeValue maxSinglePartUploadSize; - private final StatsCollectors statsCollectors = new StatsCollectors(); - private final AzureClientProvider.SuccessfulRequestHandler statsConsumer; + private final RequestMetricsRecorder requestMetricsRecorder; + private final AzureClientProvider.RequestMetricsHandler requestMetricsHandler; - public AzureBlobStore(RepositoryMetadata metadata, AzureStorageService service, BigArrays bigArrays) { + public AzureBlobStore( + RepositoryMetadata metadata, + AzureStorageService service, + BigArrays bigArrays, + RepositoriesMetrics repositoriesMetrics + ) { this.container = Repository.CONTAINER_SETTING.get(metadata.settings()); this.clientName = Repository.CLIENT_NAME.get(metadata.settings()); this.service = service; this.bigArrays = bigArrays; + this.requestMetricsRecorder = new RequestMetricsRecorder(repositoriesMetrics); + this.repositoryMetadata = metadata; // locationMode is set per repository, not per client this.locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); this.maxSinglePartUploadSize = Repository.MAX_SINGLE_PART_UPLOAD_SIZE_SETTING.get(metadata.settings()); - List requestStatsCollectors = List.of( - RequestStatsCollector.create( - (httpMethod, url) -> httpMethod == HttpMethod.HEAD, - purpose -> statsCollectors.onSuccessfulRequest(Operation.GET_BLOB_PROPERTIES, purpose) - ), - RequestStatsCollector.create( + List requestMatchers = List.of( + new RequestMatcher((httpMethod, url) -> httpMethod == HttpMethod.HEAD, Operation.GET_BLOB_PROPERTIES), + new RequestMatcher( (httpMethod, url) -> 
httpMethod == HttpMethod.GET && isListRequest(httpMethod, url) == false, - purpose -> statsCollectors.onSuccessfulRequest(Operation.GET_BLOB, purpose) + Operation.GET_BLOB ), - RequestStatsCollector.create( - AzureBlobStore::isListRequest, - purpose -> statsCollectors.onSuccessfulRequest(Operation.LIST_BLOBS, purpose) - ), - RequestStatsCollector.create( - AzureBlobStore::isPutBlockRequest, - purpose -> statsCollectors.onSuccessfulRequest(Operation.PUT_BLOCK, purpose) - ), - RequestStatsCollector.create( - AzureBlobStore::isPutBlockListRequest, - purpose -> statsCollectors.onSuccessfulRequest(Operation.PUT_BLOCK_LIST, purpose) - ), - RequestStatsCollector.create( + new RequestMatcher(AzureBlobStore::isListRequest, Operation.LIST_BLOBS), + new RequestMatcher(AzureBlobStore::isPutBlockRequest, Operation.PUT_BLOCK), + new RequestMatcher(AzureBlobStore::isPutBlockListRequest, Operation.PUT_BLOCK_LIST), + new RequestMatcher( // https://docs.microsoft.com/en-us/rest/api/storageservices/put-blob#uri-parameters // The only URI parameter allowed for put-blob operation is "timeout", but if a sas token is used, // it's possible that the URI parameters contain additional parameters unrelated to the upload type. (httpMethod, url) -> httpMethod == HttpMethod.PUT && isPutBlockRequest(httpMethod, url) == false && isPutBlockListRequest(httpMethod, url) == false, - purpose -> statsCollectors.onSuccessfulRequest(Operation.PUT_BLOB, purpose) + Operation.PUT_BLOB ) ); - this.statsConsumer = (purpose, httpMethod, url) -> { + this.requestMetricsHandler = (purpose, method, url, metrics) -> { try { URI uri = url.toURI(); String path = uri.getPath() == null ? 
"" : uri.getPath(); @@ -167,9 +163,9 @@ public class AzureBlobStore implements BlobStore { return; } - for (RequestStatsCollector requestStatsCollector : requestStatsCollectors) { - if (requestStatsCollector.shouldConsumeRequestInfo(httpMethod, url)) { - requestStatsCollector.consumeHttpRequestInfo(purpose); + for (RequestMatcher requestMatcher : requestMatchers) { + if (requestMatcher.matches(method, url)) { + requestMetricsRecorder.onRequestComplete(requestMatcher.operation, purpose, metrics); return; } } @@ -665,12 +661,12 @@ public class AzureBlobStore implements BlobStore { } private AzureBlobServiceClient getAzureBlobServiceClientClient(OperationPurpose purpose) { - return service.client(clientName, locationMode, purpose, statsConsumer); + return service.client(clientName, locationMode, purpose, requestMetricsHandler); } @Override public Map stats() { - return statsCollectors.statsMap(service.isStateless()); + return requestMetricsRecorder.statsMap(service.isStateless()); } // visible for testing @@ -691,26 +687,43 @@ public class AzureBlobStore implements BlobStore { Operation(String key) { this.key = key; } + + public static Operation fromKey(String key) { + for (Operation operation : Operation.values()) { + if (operation.key.equals(key)) { + return operation; + } + } + throw new IllegalArgumentException("No matching key: " + key); + } } - private record StatsKey(Operation operation, OperationPurpose purpose) { + // visible for testing + record StatsKey(Operation operation, OperationPurpose purpose) { @Override public String toString() { return purpose.getKey() + "_" + operation.getKey(); } } - private static class StatsCollectors { - final Map collectors = new ConcurrentHashMap<>(); + // visible for testing + class RequestMetricsRecorder { + private final RepositoriesMetrics repositoriesMetrics; + final Map opsCounters = new ConcurrentHashMap<>(); + final Map> opsAttributes = new ConcurrentHashMap<>(); + + RequestMetricsRecorder(RepositoriesMetrics 
repositoriesMetrics) { + this.repositoriesMetrics = repositoriesMetrics; + } Map statsMap(boolean stateless) { if (stateless) { - return collectors.entrySet() + return opsCounters.entrySet() .stream() .collect(Collectors.toUnmodifiableMap(e -> e.getKey().toString(), e -> e.getValue().sum())); } else { Map normalisedStats = Arrays.stream(Operation.values()).collect(Collectors.toMap(Operation::getKey, o -> 0L)); - collectors.forEach( + opsCounters.forEach( (key, value) -> normalisedStats.compute( key.operation.getKey(), (k, current) -> Objects.requireNonNull(current) + value.sum() @@ -720,11 +733,50 @@ public class AzureBlobStore implements BlobStore { } } - public void onSuccessfulRequest(Operation operation, OperationPurpose purpose) { - collectors.computeIfAbsent(new StatsKey(operation, purpose), k -> new LongAdder()).increment(); + public void onRequestComplete(Operation operation, OperationPurpose purpose, AzureClientProvider.RequestMetrics requestMetrics) { + final StatsKey statsKey = new StatsKey(operation, purpose); + final LongAdder counter = opsCounters.computeIfAbsent(statsKey, k -> new LongAdder()); + final Map attributes = opsAttributes.computeIfAbsent( + statsKey, + k -> RepositoriesMetrics.createAttributesMap(repositoryMetadata, purpose, operation.getKey()) + ); + + counter.add(1); + + // range not satisfied is not retried, so we count them by checking the final response + if (requestMetrics.getStatusCode() == RestStatus.REQUESTED_RANGE_NOT_SATISFIED.getStatus()) { + repositoriesMetrics.requestRangeNotSatisfiedExceptionCounter().incrementBy(1, attributes); + } + + repositoriesMetrics.operationCounter().incrementBy(1, attributes); + if (RestStatus.isSuccessful(requestMetrics.getStatusCode()) == false) { + repositoriesMetrics.unsuccessfulOperationCounter().incrementBy(1, attributes); + } + + repositoriesMetrics.requestCounter().incrementBy(requestMetrics.getRequestCount(), attributes); + if (requestMetrics.getErrorCount() > 0) { + 
repositoriesMetrics.exceptionCounter().incrementBy(requestMetrics.getErrorCount(), attributes); + repositoriesMetrics.exceptionHistogram().record(requestMetrics.getErrorCount(), attributes); + } + + if (requestMetrics.getThrottleCount() > 0) { + repositoriesMetrics.throttleCounter().incrementBy(requestMetrics.getThrottleCount(), attributes); + repositoriesMetrics.throttleHistogram().record(requestMetrics.getThrottleCount(), attributes); + } + + // We use nanosecond precision, so a zero value indicates that no requests were executed + if (requestMetrics.getTotalRequestTimeNanos() > 0) { + repositoriesMetrics.httpRequestTimeInMillisHistogram() + .record(TimeUnit.NANOSECONDS.toMillis(requestMetrics.getTotalRequestTimeNanos()), attributes); + } } } + // visible for testing + RequestMetricsRecorder getMetricsRecorder() { + return requestMetricsRecorder; + } + private static class AzureInputStream extends InputStream { private final CancellableRateLimitedFluxIterator cancellableRateLimitedFluxIterator; private ByteBuf byteBuf; @@ -846,26 +898,11 @@ public class AzureBlobStore implements BlobStore { } } - private static class RequestStatsCollector { - private final BiPredicate filter; - private final Consumer onHttpRequest; + private record RequestMatcher(BiPredicate filter, Operation operation) { - private RequestStatsCollector(BiPredicate filter, Consumer onHttpRequest) { - this.filter = filter; - this.onHttpRequest = onHttpRequest; - } - - static RequestStatsCollector create(BiPredicate filter, Consumer consumer) { - return new RequestStatsCollector(filter, consumer); - } - - private boolean shouldConsumeRequestInfo(HttpMethod httpMethod, URL url) { + private boolean matches(HttpMethod httpMethod, URL url) { return filter.test(httpMethod, url); } - - private void consumeHttpRequestInfo(OperationPurpose operationPurpose) { - onHttpRequest.accept(operationPurpose); - } } OptionalBytesReference getRegister(OperationPurpose purpose, String blobPath, String containerPath, 
String blobKey) { diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java index ae497ff15957..654742c98026 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureClientProvider.java @@ -24,6 +24,7 @@ import com.azure.core.http.HttpClient; import com.azure.core.http.HttpMethod; import com.azure.core.http.HttpPipelineCallContext; import com.azure.core.http.HttpPipelineNextPolicy; +import com.azure.core.http.HttpPipelinePosition; import com.azure.core.http.HttpRequest; import com.azure.core.http.HttpResponse; import com.azure.core.http.ProxyOptions; @@ -44,11 +45,13 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.repositories.azure.executors.PrivilegedExecutor; import org.elasticsearch.repositories.azure.executors.ReactorScheduledExecutorService; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.NettyAllocator; import java.net.URL; import java.time.Duration; +import java.util.Optional; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; @@ -57,6 +60,8 @@ import static org.elasticsearch.repositories.azure.AzureRepositoryPlugin.NETTY_E import static org.elasticsearch.repositories.azure.AzureRepositoryPlugin.REPOSITORY_THREAD_POOL_NAME; class AzureClientProvider extends AbstractLifecycleComponent { + private static final Logger logger = LogManager.getLogger(AzureClientProvider.class); + private static final TimeValue DEFAULT_CONNECTION_TIMEOUT = TimeValue.timeValueSeconds(30); private static final TimeValue DEFAULT_MAX_CONNECTION_IDLE_TIME = 
TimeValue.timeValueSeconds(60); private static final int DEFAULT_MAX_CONNECTIONS = 50; @@ -160,7 +165,7 @@ class AzureClientProvider extends AbstractLifecycleComponent { LocationMode locationMode, RequestRetryOptions retryOptions, ProxyOptions proxyOptions, - SuccessfulRequestHandler successfulRequestHandler, + RequestMetricsHandler requestMetricsHandler, OperationPurpose purpose ) { if (closed) { @@ -189,8 +194,9 @@ class AzureClientProvider extends AbstractLifecycleComponent { builder.credential(credentialBuilder.build()); } - if (successfulRequestHandler != null) { - builder.addPolicy(new SuccessfulRequestTracker(purpose, successfulRequestHandler)); + if (requestMetricsHandler != null) { + builder.addPolicy(new RequestMetricsTracker(purpose, requestMetricsHandler)); + builder.addPolicy(RetryMetricsTracker.INSTANCE); } if (locationMode.isSecondary()) { @@ -259,38 +265,135 @@ class AzureClientProvider extends AbstractLifecycleComponent { @Override protected void doClose() {} - private static final class SuccessfulRequestTracker implements HttpPipelinePolicy { - private static final Logger logger = LogManager.getLogger(SuccessfulRequestTracker.class); - private final OperationPurpose purpose; - private final SuccessfulRequestHandler onSuccessfulRequest; + static class RequestMetrics { + private volatile long totalRequestTimeNanos = 0; + private volatile int requestCount; + private volatile int errorCount; + private volatile int throttleCount; + private volatile int statusCode; - private SuccessfulRequestTracker(OperationPurpose purpose, SuccessfulRequestHandler onSuccessfulRequest) { + int getRequestCount() { + return requestCount; + } + + int getErrorCount() { + return errorCount; + } + + int getStatusCode() { + return statusCode; + } + + int getThrottleCount() { + return throttleCount; + } + + /** + * Total time spent executing requests to complete operation in nanoseconds + */ + long getTotalRequestTimeNanos() { + return totalRequestTimeNanos; + } + + @Override 
+ public String toString() { + return "RequestMetrics{" + + "totalRequestTimeNanos=" + + totalRequestTimeNanos + + ", requestCount=" + + requestCount + + ", errorCount=" + + errorCount + + ", throttleCount=" + + throttleCount + + ", statusCode=" + + statusCode + + '}'; + } + } + + private enum RetryMetricsTracker implements HttpPipelinePolicy { + INSTANCE; + + @Override + public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { + Optional metricsData = context.getData(RequestMetricsTracker.ES_REQUEST_METRICS_CONTEXT_KEY); + if (metricsData.isPresent() == false) { + assert false : "No metrics object associated with request " + context.getHttpRequest(); + return next.process(); + } + RequestMetrics metrics = (RequestMetrics) metricsData.get(); + metrics.requestCount++; + long requestStartTimeNanos = System.nanoTime(); + return next.process().doOnError(throwable -> { + metrics.totalRequestTimeNanos += System.nanoTime() - requestStartTimeNanos; + logger.debug("Detected error in RetryMetricsTracker", throwable); + metrics.errorCount++; + }).doOnSuccess(response -> { + metrics.totalRequestTimeNanos += System.nanoTime() - requestStartTimeNanos; + if (RestStatus.isSuccessful(response.getStatusCode()) == false) { + metrics.errorCount++; + // Azure always throttles with a 429 response, see + // https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/request-limits-and-throttling#error-code + if (response.getStatusCode() == RestStatus.TOO_MANY_REQUESTS.getStatus()) { + metrics.throttleCount++; + } + } + }); + } + + @Override + public HttpPipelinePosition getPipelinePosition() { + return HttpPipelinePosition.PER_RETRY; + } + } + + private static final class RequestMetricsTracker implements HttpPipelinePolicy { + private static final String ES_REQUEST_METRICS_CONTEXT_KEY = "_es_azure_repo_request_stats"; + private static final Logger logger = LogManager.getLogger(RequestMetricsTracker.class); + private final OperationPurpose purpose; 
+ private final RequestMetricsHandler requestMetricsHandler; + + private RequestMetricsTracker(OperationPurpose purpose, RequestMetricsHandler requestMetricsHandler) { this.purpose = purpose; - this.onSuccessfulRequest = onSuccessfulRequest; + this.requestMetricsHandler = requestMetricsHandler; } @Override public Mono process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { - return next.process().doOnSuccess(httpResponse -> trackSuccessfulRequest(context.getHttpRequest(), httpResponse)); + final RequestMetrics requestMetrics = new RequestMetrics(); + context.setData(ES_REQUEST_METRICS_CONTEXT_KEY, requestMetrics); + return next.process().doOnSuccess((httpResponse) -> { + requestMetrics.statusCode = httpResponse.getStatusCode(); + trackCompletedRequest(context.getHttpRequest(), requestMetrics); + }).doOnError(throwable -> { + logger.debug("Detected error in RequestMetricsTracker", throwable); + trackCompletedRequest(context.getHttpRequest(), requestMetrics); + }); } - private void trackSuccessfulRequest(HttpRequest httpRequest, HttpResponse httpResponse) { + private void trackCompletedRequest(HttpRequest httpRequest, RequestMetrics requestMetrics) { HttpMethod method = httpRequest.getHttpMethod(); - if (httpResponse != null && method != null && httpResponse.getStatusCode() > 199 && httpResponse.getStatusCode() <= 299) { + if (method != null) { try { - onSuccessfulRequest.onSuccessfulRequest(purpose, method, httpRequest.getUrl()); + requestMetricsHandler.requestCompleted(purpose, method, httpRequest.getUrl(), requestMetrics); } catch (Exception e) { logger.warn("Unable to notify a successful request", e); } } } + + @Override + public HttpPipelinePosition getPipelinePosition() { + return HttpPipelinePosition.PER_CALL; + } } /** - * The {@link SuccessfulRequestTracker} calls this when a request completes successfully + * The {@link RequestMetricsTracker} calls this when a request completes */ - interface SuccessfulRequestHandler { + interface 
RequestMetricsHandler { - void onSuccessfulRequest(OperationPurpose purpose, HttpMethod method, URL url); + void requestCompleted(OperationPurpose purpose, HttpMethod method, URL url, RequestMetrics metrics); } } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index aec148adf9aa..80e662343bae 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -91,6 +92,7 @@ public class AzureRepository extends MeteredBlobStoreRepository { private final ByteSizeValue chunkSize; private final AzureStorageService storageService; private final boolean readonly; + private final RepositoriesMetrics repositoriesMetrics; public AzureRepository( final RepositoryMetadata metadata, @@ -98,7 +100,8 @@ public class AzureRepository extends MeteredBlobStoreRepository { final AzureStorageService storageService, final ClusterService clusterService, final BigArrays bigArrays, - final RecoverySettings recoverySettings + final RecoverySettings recoverySettings, + final RepositoriesMetrics repositoriesMetrics ) { super( metadata, @@ -111,6 +114,7 @@ public class AzureRepository extends MeteredBlobStoreRepository { ); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.storageService = storageService; + this.repositoriesMetrics = 
repositoriesMetrics; // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. // For secondary_only setting, the repository should be read only @@ -152,7 +156,7 @@ public class AzureRepository extends MeteredBlobStoreRepository { @Override protected AzureBlobStore createBlobStore() { - final AzureBlobStore blobStore = new AzureBlobStore(metadata, storageService, bigArrays); + final AzureBlobStore blobStore = new AzureBlobStore(metadata, storageService, bigArrays, repositoriesMetrics); logger.debug( () -> format( diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index c3cd5e78c5db..4556e63378fe 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -71,7 +71,15 @@ public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin, R return Collections.singletonMap(AzureRepository.TYPE, metadata -> { AzureStorageService storageService = azureStoreService.get(); assert storageService != null; - return new AzureRepository(metadata, namedXContentRegistry, storageService, clusterService, bigArrays, recoverySettings); + return new AzureRepository( + metadata, + namedXContentRegistry, + storageService, + clusterService, + bigArrays, + recoverySettings, + repositoriesMetrics + ); }); } diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index c6e85e44d24d..7373ed948578 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ 
b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -91,7 +91,7 @@ public class AzureStorageService { String clientName, LocationMode locationMode, OperationPurpose purpose, - AzureClientProvider.SuccessfulRequestHandler successfulRequestHandler + AzureClientProvider.RequestMetricsHandler requestMetricsHandler ) { final AzureStorageSettings azureStorageSettings = getClientSettings(clientName); @@ -102,7 +102,7 @@ public class AzureStorageService { locationMode, retryOptions, proxyOptions, - successfulRequestHandler, + requestMetricsHandler, purpose ); } diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AbstractAzureServerTestCase.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AbstractAzureServerTestCase.java index 1962bddd8fdb..cb9facc061a2 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AbstractAzureServerTestCase.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AbstractAzureServerTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.mocksocket.MockHttpServer; +import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -168,7 +169,10 @@ public abstract class AbstractAzureServerTestCase extends ESTestCase { .build() ); - return new AzureBlobContainer(BlobPath.EMPTY, new AzureBlobStore(repositoryMetadata, service, BigArrays.NON_RECYCLING_INSTANCE)); + return new AzureBlobContainer( + BlobPath.EMPTY, + new AzureBlobStore(repositoryMetadata, service, BigArrays.NON_RECYCLING_INSTANCE, RepositoriesMetrics.NOOP) + ); } protected static byte[] randomBlobContent() { diff --git 
a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureClientProviderTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureClientProviderTests.java index 7d82f2d5029f..2699438de8ac 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureClientProviderTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureClientProviderTests.java @@ -26,7 +26,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; public class AzureClientProviderTests extends ESTestCase { - private static final AzureClientProvider.SuccessfulRequestHandler EMPTY_CONSUMER = (purpose, method, url) -> {}; + private static final AzureClientProvider.RequestMetricsHandler NOOP_HANDLER = (purpose, method, url, metrics) -> {}; private ThreadPool threadPool; private AzureClientProvider azureClientProvider; @@ -76,7 +76,7 @@ public class AzureClientProviderTests extends ESTestCase { locationMode, requestRetryOptions, null, - EMPTY_CONSUMER, + NOOP_HANDLER, randomFrom(OperationPurpose.values()) ); } @@ -106,7 +106,7 @@ public class AzureClientProviderTests extends ESTestCase { locationMode, requestRetryOptions, null, - EMPTY_CONSUMER, + NOOP_HANDLER, randomFrom(OperationPurpose.values()) ) ); diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 7037dd4eaf11..3afacb5b7426 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.env.Environment; 
import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -40,7 +41,8 @@ public class AzureRepositorySettingsTests extends ESTestCase { mock(AzureStorageService.class), BlobStoreTestUtil.mockClusterService(), MockBigArrays.NON_RECYCLING_INSTANCE, - new RecoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)) + new RecoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), + RepositoriesMetrics.NOOP ); assertThat(azureRepository.getBlobStore(), is(nullValue())); return azureRepository; diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index bd5723b4dbcc..3e6b7c356cb1 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -144,16 +145,7 @@ class S3BlobStore implements BlobStore { private IgnoreNoResponseMetricsCollector(Operation operation, OperationPurpose purpose) { this.operation = operation; - this.attributes = Map.of( - "repo_type", - S3Repository.TYPE, - "repo_name", - repositoryMetadata.name(), - "operation", - operation.getKey(), - "purpose", - purpose.getKey() - ); + this.attributes = 
RepositoriesMetrics.createAttributesMap(repositoryMetadata, purpose, operation.getKey()); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index cce3c764fe7a..2cd6e2b11ef7 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -9,10 +9,17 @@ package org.elasticsearch.repositories; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.LongHistogram; import org.elasticsearch.telemetry.metric.MeterRegistry; +import java.util.Map; + +/** + * The common set of metrics that we publish for {@link org.elasticsearch.repositories.blobstore.BlobStoreRepository} implementations. + */ public record RepositoriesMetrics( MeterRegistry meterRegistry, LongCounter requestCounter, @@ -28,15 +35,65 @@ public record RepositoriesMetrics( public static RepositoriesMetrics NOOP = new RepositoriesMetrics(MeterRegistry.NOOP); + /** + * Is incremented for each request sent to the blob store (including retries) + * + * Exposed as {@link #requestCounter()} + */ public static final String METRIC_REQUESTS_TOTAL = "es.repositories.requests.total"; + /** + * Is incremented for each request which returns a non 2xx response OR fails to return a response + * (includes throttling and retryable errors) + * + * Exposed as {@link #exceptionCounter()} + */ public static final String METRIC_EXCEPTIONS_TOTAL = "es.repositories.exceptions.total"; + /** + * Is incremented each time an operation ends with a 416 response + * + * Exposed as {@link #requestRangeNotSatisfiedExceptionCounter()} + */ public static final String METRIC_EXCEPTIONS_REQUEST_RANGE_NOT_SATISFIED_TOTAL = 
"es.repositories.exceptions.request_range_not_satisfied.total"; + /** + * Is incremented each time we are throttled by the blob store, e.g. upon receiving an HTTP 429 response + * + * Exposed as {@link #throttleCounter()} + */ public static final String METRIC_THROTTLES_TOTAL = "es.repositories.throttles.total"; + /** + * Is incremented for each operation we attempt, whether it succeeds or fails, this doesn't include retries + * + * Exposed via {@link #operationCounter()} + */ public static final String METRIC_OPERATIONS_TOTAL = "es.repositories.operations.total"; + /** + * Is incremented for each operation that ends with a non 2xx response or throws an exception + * + * Exposed via {@link #unsuccessfulOperationCounter()} + */ public static final String METRIC_UNSUCCESSFUL_OPERATIONS_TOTAL = "es.repositories.operations.unsuccessful.total"; + /** + * Each time an operation has one or more failed requests (from non 2xx response or exception), the + * count of those is sampled + * + * Exposed via {@link #exceptionHistogram()} + */ public static final String METRIC_EXCEPTIONS_HISTOGRAM = "es.repositories.exceptions.histogram"; + /** + * Each time an operation has one or more throttled requests, the count of those is sampled + * + * Exposed via {@link #throttleHistogram()} + */ public static final String METRIC_THROTTLES_HISTOGRAM = "es.repositories.throttles.histogram"; + /** + * Every operation that is attempted will record a time. The value recorded here is the sum of the duration of + * each of the requests executed to try and complete the operation. The duration of each request is the time + * between sending the request and either a response being received, or the request failing. Does not include + * the consumption of the body of the response or any time spent pausing between retries. 
+ * + * Exposed via {@link #httpRequestTimeInMillisHistogram()} + */ public static final String HTTP_REQUEST_TIME_IN_MILLIS_HISTOGRAM = "es.repositories.requests.http_request_time.histogram"; public RepositoriesMetrics(MeterRegistry meterRegistry) { @@ -61,4 +118,25 @@ public record RepositoriesMetrics( ) ); } + + /** + * Create the map of attributes we expect to see on repository metrics + */ + public static Map createAttributesMap( + RepositoryMetadata repositoryMetadata, + OperationPurpose purpose, + String operation + ) { + return Map.of( + "repo_type", + repositoryMetadata.type(), + "repo_name", + repositoryMetadata.name(), + "operation", + operation, + "purpose", + purpose.getKey() + ); + } + } diff --git a/server/src/main/java/org/elasticsearch/rest/RestStatus.java b/server/src/main/java/org/elasticsearch/rest/RestStatus.java index 72227b2d26ec..569b63edda00 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestStatus.java +++ b/server/src/main/java/org/elasticsearch/rest/RestStatus.java @@ -571,4 +571,16 @@ public enum RestStatus { public static RestStatus fromCode(int code) { return CODE_TO_STATUS.get(code); } + + /** + * Utility method to determine if an HTTP status code is "Successful" + * + * as defined by RFC 9110 + * + * @param code An HTTP status code + * @return true if it is a 2xx code, false otherwise + */ + public static boolean isSuccessful(int code) { + return code >= 200 && code < 300; + } } From 40bddafd92ea3ed1d3e28e694883b11dfd6516ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Tue, 8 Oct 2024 08:41:11 +0200 Subject: [PATCH 27/85] [DOCS] Adds DeBERTa v2 tokenization params to infer trained model API docs (#114242) * [DOCS] Adds DeBERTa v2 tokenization params to infer trained model API docs. * [DOCS] Mode edits. 
--- .../apis/infer-trained-model.asciidoc | 89 +++++++++++++++++++ 1 file changed, 89 insertions(+) diff --git a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc index 99c3ecad03a9..7acbc0bd2385 100644 --- a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc +++ b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc @@ -225,6 +225,17 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -301,6 +312,17 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -397,6 +419,21 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) 
include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`span`:::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -517,6 +554,21 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`span`:::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -608,6 +660,17 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) 
+include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] @@ -687,6 +750,21 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, integer) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] +`with_special_tokens`:::: +(Optional, boolean) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] +======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`span`:::: +(Optional, integer) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-span] + `with_special_tokens`:::: (Optional, boolean) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-bert-with-special-tokens] @@ -790,6 +868,17 @@ include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenizatio (Optional, string) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate] ======= +`deberta_v2`:::: +(Optional, object) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-deberta-v2] ++ +.Properties of deberta_v2 +[%collapsible%open] +======= +`truncate`:::: +(Optional, string) +include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-truncate-deberta-v2] +======= `roberta`:::: (Optional, object) include::{es-ref-dir}/ml/ml-shared.asciidoc[tag=inference-config-nlp-tokenization-roberta] From 07c3acf1c0aecc53234602aab55258530cfd8efc Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 8 Oct 2024 07:59:57 +0100 Subject: [PATCH 28/85] Remove cluster state from `/_cluster/reroute` response (#114231) Including the 
cluster state in responses to the `POST /_cluster/reroute` API was deprecated in #90399 (v8.6.0) requiring callers to pass `?metric=none` to avoid the deprecation warning. This commit adjusts the behaviour as promised in v9 so that this API never returns the cluster state, and deprecates the `?metric` parameter itself. Closes #88978 --- docs/changelog/114231.yaml | 17 +++++++ docs/reference/cluster/reroute.asciidoc | 4 +- docs/reference/commands/shard-tool.asciidoc | 2 +- .../red-yellow-cluster-status.asciidoc | 22 ++++----- .../upgrades/SnapshotBasedRecoveryIT.java | 2 +- .../test/cluster.reroute/10_basic.yml | 7 ++- .../test/cluster.reroute/11_explain.yml | 9 +++- .../20_deprecated_response_filtering.yml | 46 ++++++++++++++----- server/src/main/java/module-info.java | 1 + .../reroute/ClusterRerouteResponse.java | 2 - .../decider/MaxRetryAllocationDecider.java | 2 +- .../admin/cluster/ClusterRerouteFeatures.java | 24 ++++++++++ .../cluster/RestClusterRerouteAction.java | 34 ++++++++++++-- ...lasticsearch.features.FeatureSpecification | 1 + .../MaxRetryAllocationDeciderTests.java | 4 +- .../actions/SearchableSnapshotActionIT.java | 2 +- 16 files changed, 138 insertions(+), 41 deletions(-) create mode 100644 docs/changelog/114231.yaml create mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/cluster/ClusterRerouteFeatures.java diff --git a/docs/changelog/114231.yaml b/docs/changelog/114231.yaml new file mode 100644 index 000000000000..61c447688edc --- /dev/null +++ b/docs/changelog/114231.yaml @@ -0,0 +1,17 @@ +pr: 114231 +summary: Remove cluster state from `/_cluster/reroute` response +area: Allocation +type: breaking +issues: + - 88978 +breaking: + title: Remove cluster state from `/_cluster/reroute` response + area: REST API + details: >- + The `POST /_cluster/reroute` API no longer returns the cluster state in its + response. The `?metric` query parameter to this API now has no effect and + its use will be forbidden in a future version.
+ impact: >- + Cease usage of the `?metric` query parameter when calling the + `POST /_cluster/reroute` API. + notable: false diff --git a/docs/reference/cluster/reroute.asciidoc b/docs/reference/cluster/reroute.asciidoc index b4e4809ae73b..429070f80b9b 100644 --- a/docs/reference/cluster/reroute.asciidoc +++ b/docs/reference/cluster/reroute.asciidoc @@ -10,7 +10,7 @@ Changes the allocation of shards in a cluster. [[cluster-reroute-api-request]] ==== {api-request-title} -`POST /_cluster/reroute?metric=none` +`POST /_cluster/reroute` [[cluster-reroute-api-prereqs]] ==== {api-prereq-title} @@ -193,7 +193,7 @@ This is a short example of a simple reroute API call: [source,console] -------------------------------------------------- -POST /_cluster/reroute?metric=none +POST /_cluster/reroute { "commands": [ { diff --git a/docs/reference/commands/shard-tool.asciidoc b/docs/reference/commands/shard-tool.asciidoc index a2d9d557adf5..b1e63740cede 100644 --- a/docs/reference/commands/shard-tool.asciidoc +++ b/docs/reference/commands/shard-tool.asciidoc @@ -95,7 +95,7 @@ Changing allocation id V8QXk-QXSZinZMT-NvEq4w to tjm9Ve6uTBewVFAlfUMWjA You should run the following command to allocate this shard: -POST /_cluster/reroute?metric=none +POST /_cluster/reroute { "commands" : [ { diff --git a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc index cae4eb99dd54..eb56a37562c3 100644 --- a/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc +++ b/docs/reference/troubleshooting/common-issues/red-yellow-cluster-status.asciidoc @@ -2,12 +2,12 @@ === Red or yellow cluster health status A red or yellow cluster health status indicates one or more shards are not assigned to -a node. +a node. * **Red health status**: The cluster has some unassigned primary shards, which -means that some operations such as searches and indexing may fail. 
-* **Yellow health status**: The cluster has no unassigned primary shards but some -unassigned replica shards. This increases your risk of data loss and can degrade +means that some operations such as searches and indexing may fail. +* **Yellow health status**: The cluster has no unassigned primary shards but some +unassigned replica shards. This increases your risk of data loss and can degrade cluster performance. When your cluster has a red or yellow health status, it will continue to process @@ -16,8 +16,8 @@ cleanup activities until the cluster returns to green health status. For instanc some <> actions require the index on which they operate to have a green health status. -In many cases, your cluster will recover to green health status automatically. -If the cluster doesn't automatically recover, then you must <> +In many cases, your cluster will recover to green health status automatically. +If the cluster doesn't automatically recover, then you must <> the remaining problems so management and cleanup activities can proceed. [discrete] @@ -107,7 +107,7 @@ asynchronously in the background. [source,console] ---- -POST _cluster/reroute?metric=none +POST _cluster/reroute ---- [discrete] @@ -231,10 +231,10 @@ unassigned. See <>. If a node containing a primary shard is lost, {es} can typically replace it using a replica on another node. If you can't recover the node and replicas -don't exist or are irrecoverable, <> will report `no_valid_shard_copy` and you'll need to do one of the following: +don't exist or are irrecoverable, <> will report `no_valid_shard_copy` and you'll need to do one of the following: -* restore the missing data from <> +* restore the missing data from <> * index the missing data from its original data source * accept data loss on the index-level by running <> * accept data loss on the shard-level by executing <> allocate_stale_primary or allocate_empty_primary command with `accept_data_loss: true` @@ -246,7 +246,7 @@ resulting in data loss. 
+ [source,console] ---- -POST _cluster/reroute?metric=none +POST _cluster/reroute { "commands": [ { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index 6f4c37f9e56a..3343a683bbd1 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -203,7 +203,7 @@ public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { } builder.endObject(); - Request request = new Request(HttpPost.METHOD_NAME, "/_cluster/reroute?pretty&metric=none"); + Request request = new Request(HttpPost.METHOD_NAME, "/_cluster/reroute?pretty"); request.setJsonEntity(Strings.toString(builder)); Response response = client().performRequest(request); logger.info("--> Relocated primary to an older version {}", EntityUtils.toString(response.getEntity())); diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/10_basic.yml index f7378cc01dc0..d73efed1f757 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/10_basic.yml @@ -1,5 +1,8 @@ --- "Basic sanity check": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature + - do: - cluster.reroute: - metric: none + cluster.reroute: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/11_explain.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/11_explain.yml index 7543c96b232d..3584ce966670 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/11_explain.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/11_explain.yml @@ -13,12 +13,14 @@ setup: --- "Explain API with empty command list": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature - do: cluster.reroute: explain: true dry_run: true - metric: none body: commands: [] @@ -26,6 +28,10 @@ setup: --- "Explain API for non-existent node & shard": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature + - skip: features: [arbitrary_key] @@ -39,7 +45,6 @@ setup: cluster.reroute: explain: true dry_run: true - metric: none body: commands: - cancel: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/20_deprecated_response_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/20_deprecated_response_filtering.yml index 3bc27f53ad67..9775fbd1bea8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/20_deprecated_response_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cluster.reroute/20_deprecated_response_filtering.yml @@ -1,21 +1,45 @@ --- -"Do not return metadata by default and produce deprecation warning": - - skip: - features: [ "allowed_warnings" ] +"Do not return metadata by default and emit no warning": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature + - do: cluster.reroute: {} - allowed_warnings: - - "The [state] field in the response to the reroute API is deprecated and will be removed in a future version. Specify ?metric=none to adopt the future behaviour." 
- - is_false: state.metadata + - is_false: state + --- -"If requested return metadata and produce deprecation warning": +"Do not return metadata with ?metric=none and produce deprecation warning": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature + - skip: features: [ "allowed_warnings" ] + + - do: + cluster.reroute: + metric: none + allowed_warnings: + - >- + the [?metric] query parameter to the [POST /_cluster/reroute] API has no effect; + its use will be forbidden in a future version + - is_false: state + +--- +"Do not return metadata with ?metric=metadata and produce deprecation warning": + - requires: + cluster_features: ["cluster.reroute.ignores_metric_param"] + reason: requires this feature + + - skip: + features: [ "allowed_warnings" ] + - do: cluster.reroute: metric: metadata allowed_warnings: - - "The [state] field in the response to the reroute API is deprecated and will be removed in a future version. Specify ?metric=none to adopt the future behaviour." 
- - is_true: state.metadata - - is_false: state.nodes - + - >- + the [?metric] query parameter to the [POST /_cluster/reroute] API has no effect; + its use will be forbidden in a future version + - is_false: state diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 56672957dd57..11965abf1dcd 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -429,6 +429,7 @@ module org.elasticsearch.server { org.elasticsearch.indices.IndicesFeatures, org.elasticsearch.repositories.RepositoriesFeatures, org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures, + org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures, org.elasticsearch.index.mapper.MapperFeatures, org.elasticsearch.ingest.IngestGeoIpFeatures, org.elasticsearch.search.SearchFeatures, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index 4aa6ed60afe4..b0ec0968f8d1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -22,7 +22,6 @@ import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV10; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; @@ -45,7 +44,6 @@ public class ClusterRerouteResponse extends ActionResponse implements IsAcknowle /** * To be removed when REST compatibility with {@link org.elasticsearch.Version#V_8_6_0} / {@link RestApiVersion#V_8} no longer needed */ - @UpdateForV9(owner = 
UpdateForV9.Owner.DISTRIBUTED_COORDINATION) // to remove from the v9 API only @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // to remove entirely private final ClusterState state; private final RoutingExplanations explanations; diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java index b20cd3ecaf99..a55522ff14c8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/MaxRetryAllocationDecider.java @@ -35,7 +35,7 @@ public class MaxRetryAllocationDecider extends AllocationDecider { Setting.Property.NotCopyableOnResize ); - private static final String RETRY_FAILED_API = "POST /_cluster/reroute?retry_failed&metric=none"; + private static final String RETRY_FAILED_API = "POST /_cluster/reroute?retry_failed"; public static final String NAME = "max_retry"; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/ClusterRerouteFeatures.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/ClusterRerouteFeatures.java new file mode 100644 index 000000000000..c6582cab4a2d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/ClusterRerouteFeatures.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.rest.action.admin.cluster; + +import org.elasticsearch.features.FeatureSpecification; +import org.elasticsearch.features.NodeFeature; + +import java.util.Set; + +public class ClusterRerouteFeatures implements FeatureSpecification { + public static final NodeFeature CLUSTER_REROUTE_IGNORES_METRIC_PARAM = new NodeFeature("cluster.reroute.ignores_metric_param"); + + @Override + public Set getFeatures() { + return Set.of(CLUSTER_REROUTE_IGNORES_METRIC_PARAM); + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java index 66d6aee30d00..fada07d60b74 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java @@ -15,8 +15,11 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -39,6 +42,8 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @ServerlessScope(Scope.INTERNAL) public class RestClusterRerouteAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterRerouteAction.class); + private static final Set RESPONSE_PARAMS = addToCopy(Settings.FORMAT_PARAMS, "metric"); private 
static final ObjectParser PARSER = new ObjectParser<>("cluster_reroute"); @@ -51,7 +56,8 @@ public class RestClusterRerouteAction extends BaseRestHandler { PARSER.declareBoolean(ClusterRerouteRequest::dryRun, new ParseField("dry_run")); } - private static final String DEFAULT_METRICS = Strings.arrayToCommaDelimitedString( + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // no longer used, so can be removed + private static final String V8_DEFAULT_METRICS = Strings.arrayToCommaDelimitedString( EnumSet.complementOf(EnumSet.of(ClusterState.Metric.METADATA)).toArray() ); @@ -76,6 +82,11 @@ public class RestClusterRerouteAction extends BaseRestHandler { return true; } + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) + // actually UpdateForV11 because V10 still supports the V9 API including this deprecation message + private static final String METRIC_DEPRECATION_MESSAGE = """ + the [?metric] query parameter to the [POST /_cluster/reroute] API has no effect; its use will be forbidden in a future version"""; + @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ClusterRerouteRequest clusterRerouteRequest = createRequest(request); @@ -83,11 +94,24 @@ public class RestClusterRerouteAction extends BaseRestHandler { if (clusterRerouteRequest.explain()) { request.params().put("explain", Boolean.TRUE.toString()); } - // by default, return everything but metadata - final String metric = request.param("metric"); - if (metric == null) { - request.params().put("metric", DEFAULT_METRICS); + + switch (request.getRestApiVersion()) { + case V_9 -> { + // always avoid returning the cluster state by forcing `?metric=none`; emit a warning if `?metric` is even present + if (request.hasParam("metric")) { + deprecationLogger.critical(DeprecationCategory.API, "cluster-reroute-metric-param", METRIC_DEPRECATION_MESSAGE); + } + request.params().put("metric", "none"); + } + case V_8, 
V_7 -> { + // by default, return everything but metadata + final String metric = request.param("metric"); + if (metric == null) { + request.params().put("metric", V8_DEFAULT_METRICS); + } + } } + return channel -> client.execute( TransportClusterRerouteAction.TYPE, clusterRerouteRequest, diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification index 143c0293c5ab..5cd8935f7240 100644 --- a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -16,6 +16,7 @@ org.elasticsearch.rest.RestFeatures org.elasticsearch.indices.IndicesFeatures org.elasticsearch.repositories.RepositoriesFeatures org.elasticsearch.action.admin.cluster.allocation.AllocationStatsFeatures +org.elasticsearch.rest.action.admin.cluster.ClusterRerouteFeatures org.elasticsearch.index.mapper.MapperFeatures org.elasticsearch.ingest.IngestGeoIpFeatures org.elasticsearch.search.SearchFeatures diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java index 9d889f24acb6..c20d84fcf4b1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/MaxRetryAllocationDeciderTests.java @@ -181,7 +181,7 @@ public class MaxRetryAllocationDeciderTests extends ESAllocationTestCase { decision.getExplanation(), allOf( containsString("shard has exceeded the maximum number of retries"), - containsString("POST /_cluster/reroute?retry_failed&metric=none") + containsString("POST /_cluster/reroute?retry_failed") ) ); } @@ -280,7 +280,7 @@ public class 
MaxRetryAllocationDeciderTests extends ESAllocationTestCase { decision.getExplanation(), allOf( containsString("shard has exceeded the maximum number of retries"), - containsString("POST /_cluster/reroute?retry_failed&metric=none") + containsString("POST /_cluster/reroute?retry_failed") ) ); }); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index fefeaa95319e..f00b5b566c15 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -982,7 +982,7 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { * notification that partial-index is now GREEN. */ private void triggerStateChange() throws IOException { - Request rerouteRequest = new Request("POST", "/_cluster/reroute?metric=none"); + Request rerouteRequest = new Request("POST", "/_cluster/reroute"); client().performRequest(rerouteRequest); } } From 4ecc5bd53e473174523fdb9c38655e326408e313 Mon Sep 17 00:00:00 2001 From: Panagiotis Bailis Date: Tue, 8 Oct 2024 10:15:10 +0300 Subject: [PATCH 29/85] Text_similarity_reranker retriever rework to be evaluated during rewrite phase (#114085) --- .../org/elasticsearch/TransportVersions.java | 1 + .../action/search/SearchPhaseController.java | 9 + .../elasticsearch/search/rank/RankDoc.java | 11 +- .../retriever/CompoundRetrieverBuilder.java | 4 +- .../search/sort/ShardDocSortField.java | 6 +- ...bstractRankDocWireSerializingTestCase.java | 57 +++ .../search/rank/RankDocTests.java | 16 +- .../xpack/inference/InferenceFeatures.java | 3 +- .../xpack/inference/InferencePlugin.java | 3 + .../textsimilarity/TextSimilarityRankDoc.java | 103 ++++++ 
.../TextSimilarityRankRetrieverBuilder.java | 141 ++++---- .../TextSimilarityRankDocTests.java | 88 +++++ ...xtSimilarityRankRetrieverBuilderTests.java | 121 +------ .../70_text_similarity_rank_retriever.yml | 40 ++- x-pack/plugin/rank-rrf/build.gradle | 4 + .../xpack/rank/rrf/RRFRankDoc.java | 6 + .../xpack/rank/rrf/RRFRankDocTests.java | 21 +- .../rrf/RRFRankClientYamlTestSuiteIT.java | 2 + ...ith_text_similarity_reranker_retriever.yml | 334 ++++++++++++++++++ 19 files changed, 767 insertions(+), 203 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/search/rank/AbstractRankDocWireSerializingTestCase.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDocTests.java create mode 100644 x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1911013cbe8e..2095ba47ee37 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -236,6 +236,7 @@ public class TransportVersions { public static final TransportVersion INGEST_GEO_DATABASE_PROVIDERS = def(8_760_00_0); public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0); public static final TransportVersion FAST_REFRESH_RCO = def(8_762_00_0); + public static final TransportVersion TEXT_SIMILARITY_RERANKER_QUERY_REWRITE = def(8_763_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index a6acb3ee2a52..1c4eb1c19137 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -36,6 +36,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; +import org.elasticsearch.search.SearchSortValues; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -51,6 +52,7 @@ import org.elasticsearch.search.profile.SearchProfileResultsBuilder; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.search.rank.context.QueryPhaseRankCoordinatorContext; +import org.elasticsearch.search.sort.ShardDocSortField; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.Suggest.Suggestion; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; @@ -464,6 +466,13 @@ public final class SearchPhaseController { assert shardDoc instanceof RankDoc; searchHit.setRank(((RankDoc) shardDoc).rank); searchHit.score(shardDoc.score); + long shardAndDoc = ShardDocSortField.encodeShardAndDoc(shardDoc.shardIndex, shardDoc.doc); + searchHit.sortValues( + new SearchSortValues( + new Object[] { shardDoc.score, shardAndDoc }, + new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW } + ) + ); } else if (sortedTopDocs.isSortedByField) { FieldDoc fieldDoc = (FieldDoc) shardDoc; searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.sortValueFormats); diff --git 
a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java index b16a23493111..9ab14aa9362b 100644 --- a/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java +++ b/server/src/main/java/org/elasticsearch/search/rank/RankDoc.java @@ -11,9 +11,11 @@ package org.elasticsearch.search.rank; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -24,7 +26,7 @@ import java.util.Objects; * {@code RankDoc} is the base class for all ranked results. * Subclasses should extend this with additional information required for their global ranking method. 
*/ -public class RankDoc extends ScoreDoc implements NamedWriteable, ToXContentFragment, Comparable { +public class RankDoc extends ScoreDoc implements VersionedNamedWriteable, ToXContentFragment, Comparable { public static final String NAME = "rank_doc"; @@ -40,6 +42,11 @@ public class RankDoc extends ScoreDoc implements NamedWriteable, ToXContentFragm return NAME; } + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.RANK_DOCS_RETRIEVER; + } + @Override public final int compareTo(RankDoc other) { if (score != other.score) { diff --git a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java index 1962145d7336..22bef026523e 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/CompoundRetrieverBuilder.java @@ -160,7 +160,7 @@ public abstract class CompoundRetrieverBuilder> 32); } + + public static long encodeShardAndDoc(int shardIndex, int doc) { + return (((long) shardIndex) << 32) | (doc & 0xFFFFFFFFL); + } } diff --git a/server/src/test/java/org/elasticsearch/search/rank/AbstractRankDocWireSerializingTestCase.java b/server/src/test/java/org/elasticsearch/search/rank/AbstractRankDocWireSerializingTestCase.java new file mode 100644 index 000000000000..d0c85a33acf0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/rank/AbstractRankDocWireSerializingTestCase.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.search.rank; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilder; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public abstract class AbstractRankDocWireSerializingTestCase extends AbstractWireSerializingTestCase { + + protected abstract T createTestRankDoc(); + + @Override + protected NamedWriteableRegistry writableRegistry() { + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); + List entries = searchModule.getNamedWriteables(); + entries.addAll(getAdditionalNamedWriteables()); + return new NamedWriteableRegistry(entries); + } + + protected abstract List getAdditionalNamedWriteables(); + + @Override + protected T createTestInstance() { + return createTestRankDoc(); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testRankDocSerialization() throws IOException { + int totalDocs = randomIntBetween(10, 100); + Set docs = new HashSet<>(); + for (int i = 0; i < totalDocs; i++) { + docs.add(createTestRankDoc()); + } + RankDocsQueryBuilder rankDocsQueryBuilder = new RankDocsQueryBuilder(docs.toArray((T[]) new RankDoc[0]), null, randomBoolean()); + RankDocsQueryBuilder copy = (RankDocsQueryBuilder) 
copyNamedWriteable(rankDocsQueryBuilder, writableRegistry(), QueryBuilder.class); + assertThat(rankDocsQueryBuilder, equalTo(copy)); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/rank/RankDocTests.java b/server/src/test/java/org/elasticsearch/search/rank/RankDocTests.java index d190139309c3..21101b2bc7db 100644 --- a/server/src/test/java/org/elasticsearch/search/rank/RankDocTests.java +++ b/server/src/test/java/org/elasticsearch/search/rank/RankDocTests.java @@ -9,27 +9,29 @@ package org.elasticsearch.search.rank; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.io.IOException; +import java.util.Collections; +import java.util.List; -public class RankDocTests extends AbstractWireSerializingTestCase { +public class RankDocTests extends AbstractRankDocWireSerializingTestCase { - static RankDoc createTestRankDoc() { + protected RankDoc createTestRankDoc() { RankDoc rankDoc = new RankDoc(randomNonNegativeInt(), randomFloat(), randomIntBetween(0, 1)); rankDoc.rank = randomNonNegativeInt(); return rankDoc; } @Override - protected Writeable.Reader instanceReader() { - return RankDoc::new; + protected List getAdditionalNamedWriteables() { + return Collections.emptyList(); } @Override - protected RankDoc createTestInstance() { - return createTestRankDoc(); + protected Writeable.Reader instanceReader() { + return RankDoc::new; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 30ccb48d5c70..a3f210505463 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -27,7 +27,8 @@ public class 
InferenceFeatures implements FeatureSpecification { TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, - SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS + SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, + TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index dbb9130ab91e..927fd9480988 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -36,6 +36,7 @@ import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.search.rank.RankBuilder; +import org.elasticsearch.search.rank.RankDoc; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xcontent.ParseField; @@ -66,6 +67,7 @@ import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankBuilder; +import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankDoc; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceEndpointAction; @@ -253,6 +255,7 @@ public class 
InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP var entries = new ArrayList<>(InferenceNamedWriteablesProvider.getNamedWriteables()); entries.add(new NamedWriteableRegistry.Entry(RankBuilder.class, TextSimilarityRankBuilder.NAME, TextSimilarityRankBuilder::new)); entries.add(new NamedWriteableRegistry.Entry(RankBuilder.class, RandomRankBuilder.NAME, RandomRankBuilder::new)); + entries.add(new NamedWriteableRegistry.Entry(RankDoc.class, TextSimilarityRankDoc.NAME, TextSimilarityRankDoc::new)); return entries; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java new file mode 100644 index 000000000000..d208623e5332 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDoc.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.apache.lucene.search.Explanation; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class TextSimilarityRankDoc extends RankDoc { + + public static final String NAME = "text_similarity_rank_doc"; + + public final String inferenceId; + public final String field; + + public TextSimilarityRankDoc(int doc, float score, int shardIndex, String inferenceId, String field) { + super(doc, score, shardIndex); + this.inferenceId = inferenceId; + this.field = field; + } + + public TextSimilarityRankDoc(StreamInput in) throws IOException { + super(in); + inferenceId = in.readString(); + field = in.readString(); + } + + @Override + public Explanation explain(Explanation[] sources, String[] queryNames) { + final String queryAlias = queryNames[0] == null ? 
"" : "[" + queryNames[0] + "]"; + return Explanation.match( + score, + "text_similarity_reranker match using inference endpoint: [" + + inferenceId + + "] on document field: [" + + field + + "] matching on source query " + + queryAlias, + sources + ); + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeString(inferenceId); + out.writeString(field); + } + + @Override + public boolean doEquals(RankDoc rd) { + TextSimilarityRankDoc tsrd = (TextSimilarityRankDoc) rd; + return Objects.equals(inferenceId, tsrd.inferenceId) && Objects.equals(field, tsrd.field); + } + + @Override + public int doHashCode() { + return Objects.hash(inferenceId, field); + } + + @Override + public String toString() { + return "TextSimilarityRankDoc{" + + "doc=" + + doc + + ", shardIndex=" + + shardIndex + + ", score=" + + score + + ", inferenceId=" + + inferenceId + + ", field=" + + field + + '}'; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("inferenceId", inferenceId); + builder.field("field", field); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.TEXT_SIMILARITY_RERANKER_QUERY_REWRITE; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java index 50d762e7b90a..3ddaab12eca1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilder.java @@ -7,14 +7,20 @@ package 
org.elasticsearch.xpack.inference.rank.textsimilarity; +import org.apache.lucene.search.ScoreDoc; import org.elasticsearch.common.ParsingException; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.rank.RankDoc; +import org.elasticsearch.search.retriever.CompoundRetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import org.elasticsearch.search.retriever.RetrieverParserContext; +import org.elasticsearch.search.retriever.rankdoc.RankDocsQueryBuilder; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -32,11 +38,14 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstr /** * A {@code RetrieverBuilder} for parsing and constructing a text similarity reranker retriever. 
*/ -public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { +public class TextSimilarityRankRetrieverBuilder extends CompoundRetrieverBuilder { public static final NodeFeature TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED = new NodeFeature( "text_similarity_reranker_retriever_supported" ); + public static final NodeFeature TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED = new NodeFeature( + "text_similarity_reranker_retriever_composition_supported" + ); public static final ParseField RETRIEVER_FIELD = new ParseField("retriever"); public static final ParseField INFERENCE_ID_FIELD = new ParseField("inference_id"); @@ -51,7 +60,6 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { String inferenceText = (String) args[2]; String field = (String) args[3]; int rankWindowSize = args[4] == null ? DEFAULT_RANK_WINDOW_SIZE : (int) args[4]; - return new TextSimilarityRankRetrieverBuilder(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize); }); @@ -70,17 +78,20 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED) == false) { throw new ParsingException(parser.getTokenLocation(), "unknown retriever [" + TextSimilarityRankBuilder.NAME + "]"); } + if (context.clusterSupportsFeature(TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED) == false) { + throw new UnsupportedOperationException( + "[text_similarity_reranker] retriever composition feature is not supported by all nodes in the cluster" + ); + } if (TextSimilarityRankBuilder.TEXT_SIMILARITY_RERANKER_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) { throw LicenseUtils.newComplianceException(TextSimilarityRankBuilder.NAME); } return PARSER.apply(parser, context); } - private final RetrieverBuilder retrieverBuilder; private final String inferenceId; private final String inferenceText; private final String field; - private final int rankWindowSize; public 
TextSimilarityRankRetrieverBuilder( RetrieverBuilder retrieverBuilder, @@ -89,15 +100,14 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { String field, int rankWindowSize ) { - this.retrieverBuilder = retrieverBuilder; + super(List.of(new RetrieverSource(retrieverBuilder, null)), rankWindowSize); this.inferenceId = inferenceId; this.inferenceText = inferenceText; this.field = field; - this.rankWindowSize = rankWindowSize; } public TextSimilarityRankRetrieverBuilder( - RetrieverBuilder retrieverBuilder, + List retrieverSource, String inferenceId, String inferenceText, String field, @@ -106,66 +116,75 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { String retrieverName, List preFilterQueryBuilders ) { - this.retrieverBuilder = retrieverBuilder; + super(retrieverSource, rankWindowSize); + if (retrieverSource.size() != 1) { + throw new IllegalArgumentException("[" + getName() + "] retriever should have exactly one inner retriever"); + } this.inferenceId = inferenceId; this.inferenceText = inferenceText; this.field = field; - this.rankWindowSize = rankWindowSize; this.minScore = minScore; this.retrieverName = retrieverName; this.preFilterQueryBuilders = preFilterQueryBuilders; } @Override - public QueryBuilder topDocsQuery() { - // the original matching set of the TextSimilarityRank retriever is specified by its nested retriever - return retrieverBuilder.topDocsQuery(); - } - - @Override - public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { - // rewrite prefilters - boolean hasChanged = false; - var newPreFilters = rewritePreFilters(ctx); - hasChanged |= newPreFilters != preFilterQueryBuilders; - - // rewrite nested retriever - RetrieverBuilder newRetriever = retrieverBuilder.rewrite(ctx); - hasChanged |= newRetriever != retrieverBuilder; - if (hasChanged) { - return new TextSimilarityRankRetrieverBuilder( - newRetriever, - field, - inferenceText, - inferenceId, - rankWindowSize, - 
minScore, - this.retrieverName, - newPreFilters - ); - } - return this; - } - - @Override - public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { - retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); - retrieverBuilder.extractToSearchSourceBuilder(searchSourceBuilder, compoundUsed); - // Combining with other rank builder (such as RRF) is not supported yet - if (searchSourceBuilder.rankBuilder() != null) { - throw new IllegalArgumentException("text similarity rank builder cannot be combined with other rank builders"); - } - - searchSourceBuilder.rankBuilder( - new TextSimilarityRankBuilder(this.field, this.inferenceId, this.inferenceText, this.rankWindowSize, this.minScore) + protected TextSimilarityRankRetrieverBuilder clone(List newChildRetrievers) { + return new TextSimilarityRankRetrieverBuilder( + newChildRetrievers, + inferenceId, + inferenceText, + field, + rankWindowSize, + minScore, + retrieverName, + preFilterQueryBuilders ); } - /** - * Determines if this retriever contains sub-retrievers that need to be executed prior to search. 
- */ - public boolean isCompound() { - return retrieverBuilder.isCompound(); + @Override + protected RankDoc[] combineInnerRetrieverResults(List rankResults) { + assert rankResults.size() == 1; + ScoreDoc[] scoreDocs = rankResults.getFirst(); + TextSimilarityRankDoc[] textSimilarityRankDocs = new TextSimilarityRankDoc[scoreDocs.length]; + for (int i = 0; i < scoreDocs.length; i++) { + ScoreDoc scoreDoc = scoreDocs[i]; + textSimilarityRankDocs[i] = new TextSimilarityRankDoc(scoreDoc.doc, scoreDoc.score, scoreDoc.shardIndex, inferenceId, field); + } + return textSimilarityRankDocs; + } + + @Override + public QueryBuilder explainQuery() { + // the original matching set of the TextSimilarityRank retriever is specified by its nested retriever + return new RankDocsQueryBuilder(rankDocs, new QueryBuilder[] { innerRetrievers.getFirst().retriever().explainQuery() }, true); + } + + @Override + protected SearchSourceBuilder createSearchSourceBuilder(PointInTimeBuilder pit, RetrieverBuilder retrieverBuilder) { + var sourceBuilder = new SearchSourceBuilder().pointInTimeBuilder(pit) + .trackTotalHits(false) + .storedFields(new StoredFieldsContext(false)) + .size(rankWindowSize); + if (preFilterQueryBuilders.isEmpty() == false) { + retrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); + } + retrieverBuilder.extractToSearchSourceBuilder(sourceBuilder, true); + + // apply the pre-filters + if (preFilterQueryBuilders.size() > 0) { + QueryBuilder query = sourceBuilder.query(); + BoolQueryBuilder newQuery = new BoolQueryBuilder(); + if (query != null) { + newQuery.must(query); + } + preFilterQueryBuilders.forEach(newQuery::filter); + sourceBuilder.query(newQuery); + } + sourceBuilder.rankBuilder( + new TextSimilarityRankBuilder(this.field, this.inferenceId, this.inferenceText, this.rankWindowSize, this.minScore) + ); + return sourceBuilder; } @Override @@ -179,7 +198,7 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { @Override 
protected void doToXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(RETRIEVER_FIELD.getPreferredName(), retrieverBuilder); + builder.field(RETRIEVER_FIELD.getPreferredName(), innerRetrievers.getFirst().retriever()); builder.field(INFERENCE_ID_FIELD.getPreferredName(), inferenceId); builder.field(INFERENCE_TEXT_FIELD.getPreferredName(), inferenceText); builder.field(FIELD_FIELD.getPreferredName(), field); @@ -187,9 +206,9 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { } @Override - protected boolean doEquals(Object other) { + public boolean doEquals(Object other) { TextSimilarityRankRetrieverBuilder that = (TextSimilarityRankRetrieverBuilder) other; - return Objects.equals(retrieverBuilder, that.retrieverBuilder) + return super.doEquals(other) && Objects.equals(inferenceId, that.inferenceId) && Objects.equals(inferenceText, that.inferenceText) && Objects.equals(field, that.field) @@ -198,7 +217,7 @@ public class TextSimilarityRankRetrieverBuilder extends RetrieverBuilder { } @Override - protected int doHashCode() { - return Objects.hash(retrieverBuilder, inferenceId, inferenceText, field, rankWindowSize, minScore); + public int doHashCode() { + return Objects.hash(inferenceId, inferenceText, field, rankWindowSize, minScore); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDocTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDocTests.java new file mode 100644 index 000000000000..fed4565c54bd --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankDocTests.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.rank.textsimilarity; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.search.rank.AbstractRankDocWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.InferencePlugin; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.search.rank.RankDoc.NO_RANK; + +public class TextSimilarityRankDocTests extends AbstractRankDocWireSerializingTestCase { + + static TextSimilarityRankDoc createTestTextSimilarityRankDoc() { + TextSimilarityRankDoc instance = new TextSimilarityRankDoc( + randomNonNegativeInt(), + randomFloat(), + randomBoolean() ? -1 : randomNonNegativeInt(), + randomAlphaOfLength(randomIntBetween(2, 5)), + randomAlphaOfLength(randomIntBetween(2, 5)) + ); + instance.rank = randomBoolean() ? 
NO_RANK : randomIntBetween(1, 10000); + return instance; + } + + @Override + protected List getAdditionalNamedWriteables() { + try (InferencePlugin plugin = new InferencePlugin(Settings.EMPTY)) { + return plugin.getNamedWriteables(); + } + } + + @Override + protected Writeable.Reader instanceReader() { + return TextSimilarityRankDoc::new; + } + + @Override + protected TextSimilarityRankDoc createTestRankDoc() { + return createTestTextSimilarityRankDoc(); + } + + @Override + protected TextSimilarityRankDoc mutateInstance(TextSimilarityRankDoc instance) throws IOException { + int doc = instance.doc; + int shardIndex = instance.shardIndex; + float score = instance.score; + int rank = instance.rank; + String inferenceId = instance.inferenceId; + String field = instance.field; + + switch (randomInt(5)) { + case 0: + doc = randomValueOtherThan(doc, ESTestCase::randomNonNegativeInt); + break; + case 1: + shardIndex = shardIndex == -1 ? randomNonNegativeInt() : -1; + break; + case 2: + score = randomValueOtherThan(score, ESTestCase::randomFloat); + break; + case 3: + rank = rank == NO_RANK ? 
randomIntBetween(1, 10000) : NO_RANK; + break; + case 4: + inferenceId = randomValueOtherThan(inferenceId, () -> randomAlphaOfLength(randomIntBetween(2, 5))); + break; + case 5: + field = randomValueOtherThan(field, () -> randomAlphaOfLength(randomIntBetween(2, 5))); + break; + default: + throw new AssertionError(); + } + TextSimilarityRankDoc mutated = new TextSimilarityRankDoc(doc, score, shardIndex, inferenceId, field); + mutated.rank = rank; + return mutated; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java index 140b181a42a0..32301bf9efea 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverBuilderTests.java @@ -9,17 +9,9 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.RandomQueryBuilder; -import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.index.query.Rewriteable; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.builder.SubSearchSourceBuilder; import org.elasticsearch.search.retriever.RetrieverBuilder; import 
org.elasticsearch.search.retriever.RetrieverParserContext; import org.elasticsearch.search.retriever.TestRetrieverBuilder; @@ -38,10 +30,8 @@ import java.util.ArrayList; import java.util.List; import static org.elasticsearch.search.rank.RankBuilder.DEFAULT_RANK_WINDOW_SIZE; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.mock; public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTestCase { @@ -82,6 +72,7 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes new SearchUsage(), nf -> nf == RetrieverBuilder.RETRIEVERS_SUPPORTED || nf == TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED + || nf == TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED ) ); } @@ -131,86 +122,6 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes } } - public void testRewriteInnerRetriever() throws IOException { - final boolean[] rewritten = { false }; - List preFilterQueryBuilders = new ArrayList<>(); - if (randomBoolean()) { - for (int i = 0; i < randomIntBetween(1, 5); i++) { - preFilterQueryBuilders.add(RandomQueryBuilder.createQuery(random())); - } - } - RetrieverBuilder innerRetriever = new TestRetrieverBuilder("top-level-retriever") { - @Override - public RetrieverBuilder rewrite(QueryRewriteContext ctx) throws IOException { - if (randomBoolean()) { - return this; - } - rewritten[0] = true; - return new TestRetrieverBuilder("nested-rewritten-retriever") { - @Override - public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { - if (preFilterQueryBuilders.isEmpty() == false) { - BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); - - for (QueryBuilder preFilterQueryBuilder : preFilterQueryBuilders) { - boolQueryBuilder.filter(preFilterQueryBuilder); - } - 
boolQueryBuilder.must(new RangeQueryBuilder("some_field")); - searchSourceBuilder.subSearches().add(new SubSearchSourceBuilder(boolQueryBuilder)); - } else { - searchSourceBuilder.subSearches().add(new SubSearchSourceBuilder(new RangeQueryBuilder("some_field"))); - } - } - }; - } - - @Override - public void extractToSearchSourceBuilder(SearchSourceBuilder searchSourceBuilder, boolean compoundUsed) { - if (preFilterQueryBuilders.isEmpty() == false) { - BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); - - for (QueryBuilder preFilterQueryBuilder : preFilterQueryBuilders) { - boolQueryBuilder.filter(preFilterQueryBuilder); - } - boolQueryBuilder.must(new TermQueryBuilder("field", "value")); - searchSourceBuilder.subSearches().add(new SubSearchSourceBuilder(boolQueryBuilder)); - } else { - searchSourceBuilder.subSearches().add(new SubSearchSourceBuilder(new TermQueryBuilder("field", "value"))); - } - } - }; - TextSimilarityRankRetrieverBuilder textSimilarityRankRetrieverBuilder = createRandomTextSimilarityRankRetrieverBuilder( - innerRetriever - ); - textSimilarityRankRetrieverBuilder.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders); - SearchSourceBuilder source = new SearchSourceBuilder().retriever(textSimilarityRankRetrieverBuilder); - QueryRewriteContext queryRewriteContext = mock(QueryRewriteContext.class); - source = Rewriteable.rewrite(source, queryRewriteContext); - assertNull(source.retriever()); - if (false == preFilterQueryBuilders.isEmpty()) { - if (source.query() instanceof MatchAllQueryBuilder == false && source.query() instanceof MatchNoneQueryBuilder == false) { - assertThat(source.query(), instanceOf(BoolQueryBuilder.class)); - BoolQueryBuilder bq = (BoolQueryBuilder) source.query(); - assertFalse(bq.must().isEmpty()); - assertThat(bq.must().size(), equalTo(1)); - if (rewritten[0]) { - assertThat(bq.must().get(0), instanceOf(RangeQueryBuilder.class)); - } else { - assertThat(bq.must().get(0), instanceOf(TermQueryBuilder.class)); - } 
- for (int j = 0; j < bq.filter().size(); j++) { - assertEqualQueryOrMatchAllNone(bq.filter().get(j), preFilterQueryBuilders.get(j)); - } - } - } else { - if (rewritten[0]) { - assertThat(source.query(), instanceOf(RangeQueryBuilder.class)); - } else { - assertThat(source.query(), instanceOf(TermQueryBuilder.class)); - } - } - } - public void testTextSimilarityRetrieverParsing() throws IOException { String restContent = "{" + " \"retriever\": {" @@ -250,29 +161,6 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes } } - public void testIsCompound() { - RetrieverBuilder compoundInnerRetriever = new TestRetrieverBuilder(ESTestCase.randomAlphaOfLengthBetween(5, 10)) { - @Override - public boolean isCompound() { - return true; - } - }; - RetrieverBuilder nonCompoundInnerRetriever = new TestRetrieverBuilder(ESTestCase.randomAlphaOfLengthBetween(5, 10)) { - @Override - public boolean isCompound() { - return false; - } - }; - TextSimilarityRankRetrieverBuilder compoundTextSimilarityRankRetrieverBuilder = createRandomTextSimilarityRankRetrieverBuilder( - compoundInnerRetriever - ); - assertTrue(compoundTextSimilarityRankRetrieverBuilder.isCompound()); - TextSimilarityRankRetrieverBuilder nonCompoundTextSimilarityRankRetrieverBuilder = createRandomTextSimilarityRankRetrieverBuilder( - nonCompoundInnerRetriever - ); - assertFalse(nonCompoundTextSimilarityRankRetrieverBuilder.isCompound()); - } - public void testTopDocsQuery() { RetrieverBuilder innerRetriever = new TestRetrieverBuilder(ESTestCase.randomAlphaOfLengthBetween(5, 10)) { @Override @@ -281,11 +169,6 @@ public class TextSimilarityRankRetrieverBuilderTests extends AbstractXContentTes } }; TextSimilarityRankRetrieverBuilder retriever = createRandomTextSimilarityRankRetrieverBuilder(innerRetriever); - assertThat(retriever.topDocsQuery(), instanceOf(TermQueryBuilder.class)); + expectThrows(IllegalStateException.class, "Should not be called, missing a rewrite?", retriever::topDocsQuery); 
} - - private static void assertEqualQueryOrMatchAllNone(QueryBuilder actual, QueryBuilder expected) { - assertThat(actual, anyOf(instanceOf(MatchAllQueryBuilder.class), instanceOf(MatchNoneQueryBuilder.class), equalTo(expected))); - } - } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml index e2c141705757..9a4d7f441616 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/70_text_similarity_rank_retriever.yml @@ -87,11 +87,9 @@ setup: - length: { hits.hits: 2 } - match: { hits.hits.0._id: "doc_2" } - - match: { hits.hits.0._rank: 1 } - close_to: { hits.hits.0._score: { value: 0.4, error: 0.001 } } - match: { hits.hits.1._id: "doc_1" } - - match: { hits.hits.1._rank: 2 } - close_to: { hits.hits.1._score: { value: 0.2, error: 0.001 } } --- @@ -123,7 +121,6 @@ setup: - length: { hits.hits: 1 } - match: { hits.hits.0._id: "doc_1" } - - match: { hits.hits.0._rank: 1 } - close_to: { hits.hits.0._score: { value: 0.2, error: 0.001 } } @@ -178,3 +175,40 @@ setup: field: text size: 10 + +--- +"text similarity reranking with explain": + + - do: + search: + index: test-index + body: + track_total_hits: true + fields: [ "text", "topic" ] + retriever: { + text_similarity_reranker: { + retriever: + { + standard: { + query: { + term: { + topic: "science" + } + } + } + }, + rank_window_size: 10, + inference_id: my-rerank-model, + inference_text: "How often does the moon hide the sun?", + field: text + } + } + size: 10 + explain: true + + - match: { hits.hits.0._id: "doc_2" } + - match: { hits.hits.1._id: "doc_1" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.4, error: 0.000001 } } + - match: 
{hits.hits.0._explanation.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } + - match: {hits.hits.0._explanation.details.0.description: "/weight.*science.*/" } diff --git a/x-pack/plugin/rank-rrf/build.gradle b/x-pack/plugin/rank-rrf/build.gradle index 2db33fa0f2c8..2c3f217243aa 100644 --- a/x-pack/plugin/rank-rrf/build.gradle +++ b/x-pack/plugin/rank-rrf/build.gradle @@ -20,7 +20,11 @@ dependencies { compileOnly project(path: xpackModule('core')) testImplementation(testArtifact(project(xpackModule('core')))) + testImplementation(testArtifact(project(':server'))) clusterModules project(xpackModule('rank-rrf')) + clusterModules project(xpackModule('inference')) clusterModules project(':modules:lang-painless') + + clusterPlugins project(':x-pack:plugin:inference:qa:test-service-plugin') } diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java index 500ed1739512..272df248e53e 100644 --- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java +++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankDoc.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.rank.rrf; import org.apache.lucene.search.Explanation; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -169,4 +170,9 @@ public final class RRFRankDoc extends RankDoc { builder.field("scores", scores); builder.field("rankConstant", rankConstant); } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.RRF_QUERY_REWRITE; + } } diff --git a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRankDocTests.java 
b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRankDocTests.java index 4b64b6c173c9..5548392270a0 100644 --- a/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRankDocTests.java +++ b/x-pack/plugin/rank-rrf/src/test/java/org/elasticsearch/xpack/rank/rrf/RRFRankDocTests.java @@ -7,15 +7,17 @@ package org.elasticsearch.xpack.rank.rrf; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.search.rank.AbstractRankDocWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.List; import static org.elasticsearch.xpack.rank.rrf.RRFRankDoc.NO_RANK; -public class RRFRankDocTests extends AbstractWireSerializingTestCase { +public class RRFRankDocTests extends AbstractRankDocWireSerializingTestCase { static RRFRankDoc createTestRRFRankDoc(int queryCount) { RRFRankDoc instance = new RRFRankDoc( @@ -35,9 +37,13 @@ public class RRFRankDocTests extends AbstractWireSerializingTestCase return instance; } - static RRFRankDoc createTestRRFRankDoc() { - int queryCount = randomIntBetween(2, 20); - return createTestRRFRankDoc(queryCount); + @Override + protected List getAdditionalNamedWriteables() { + try (RRFRankPlugin rrfRankPlugin = new RRFRankPlugin()) { + return rrfRankPlugin.getNamedWriteables(); + } catch (IOException ex) { + throw new AssertionError("Failed to create RRFRankPlugin", ex); + } } @Override @@ -46,8 +52,9 @@ public class RRFRankDocTests extends AbstractWireSerializingTestCase } @Override - protected RRFRankDoc createTestInstance() { - return createTestRRFRankDoc(); + protected RRFRankDoc createTestRankDoc() { + int queryCount = randomIntBetween(2, 20); + return createTestRRFRankDoc(queryCount); } @Override diff --git 
a/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankClientYamlTestSuiteIT.java b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankClientYamlTestSuiteIT.java index 3a577eb62faa..32b5aedd5d99 100644 --- a/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankClientYamlTestSuiteIT.java +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankClientYamlTestSuiteIT.java @@ -23,7 +23,9 @@ public class RRFRankClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .nodes(2) .module("rank-rrf") .module("lang-painless") + .module("x-pack-inference") .setting("xpack.license.self_generated.type", "trial") + .plugin("inference-service-test") .build(); public RRFRankClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml new file mode 100644 index 000000000000..3e758ae11f7e --- /dev/null +++ b/x-pack/plugin/rank-rrf/src/yamlRestTest/resources/rest-api-spec/test/rrf/800_rrf_with_text_similarity_reranker_retriever.yml @@ -0,0 +1,334 @@ +setup: + - requires: + cluster_features: ['rrf_retriever_composition_supported', 'text_similarity_reranker_retriever_supported'] + reason: need to have support for rrf and semantic reranking composition + test_runner_features: "close_to" + + - do: + inference.put: + task_type: rerank + inference_id: my-rerank-model + body: > + { + "service": "test_reranking_service", + "service_settings": { + "model_id": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + + - do: + indices.create: + index: test-index + body: + settings: + number_of_shards: 1 + mappings: + properties: + text: + type: text + topic: + type: keyword 
+ subtopic: + type: keyword + integer: + type: integer + + - do: + index: + index: test-index + id: doc_1 + body: + text: "Sun Moon Lake is a lake in Nantou County, Taiwan. It is the largest lake in Taiwan." + topic: [ "geography" ] + integer: 1 + + - do: + index: + index: test-index + id: doc_2 + body: + text: "The phases of the Moon come from the position of the Moon relative to the Earth and Sun." + topic: [ "science" ] + subtopic: [ "astronomy" ] + integer: 2 + + - do: + index: + index: test-index + id: doc_3 + body: + text: "As seen from Earth, a solar eclipse happens when the Moon is directly between the Earth and the Sun." + topic: [ "science" ] + subtopic: [ "technology" ] + integer: 3 + + - do: + indices.refresh: {} + +--- +"rrf retriever with a nested text similarity reranker": + + - do: + search: + index: test-index + body: + track_total_hits: true + fields: [ "text", "topic" ] + retriever: + rrf: { + retrievers: + [ + { + standard: { + query: { + bool: { + should: + [ + { + constant_score: { + filter: { + term: { + integer: 1 + } + }, + boost: 10 + } + }, + { + constant_score: + { + filter: + { + term: + { + integer: 2 + } + }, + boost: 1 + } + } + ] + } + } + } + }, + { + text_similarity_reranker: { + retriever: + { + standard: { + query: { + term: { + topic: "science" + } + } + } + }, + rank_window_size: 10, + inference_id: my-rerank-model, + inference_text: "How often does the moon hide the sun?", + field: text + } + } + ], + rank_window_size: 10, + rank_constant: 1 + } + size: 10 + from: 1 + aggs: + topics: + terms: + field: topic + size: 10 + + - match: { hits.total.value: 3 } + - length: { hits.hits: 2 } + + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_3" } + + - match: { aggregations.topics.buckets.0.key: "science" } + - match: { aggregations.topics.buckets.0.doc_count: 2 } + - match: { aggregations.topics.buckets.1.key: "geography" } + - match: { aggregations.topics.buckets.1.doc_count: 1 } + +--- +"Text similarity 
reranker on top of an RRF retriever": + + - do: + search: + index: test-index + body: + track_total_hits: true + fields: [ "text", "topic" ] + retriever: + { + text_similarity_reranker: { + retriever: + { + rrf: { + retrievers: + [ + { + standard: { + query: { + bool: { + should: + [ + { + constant_score: { + filter: { + term: { + integer: 1 + } + }, + boost: 10 + } + }, + { + constant_score: + { + filter: + { + term: + { + integer: 3 + } + }, + boost: 1 + } + } + ] + } + } + } + }, + { + standard: { + query: { + term: { + topic: "geography" + } + } + } + } + ], + rank_window_size: 10, + rank_constant: 1 + } + }, + rank_window_size: 10, + inference_id: my-rerank-model, + inference_text: "How often does the moon hide the sun?", + field: text + } + } + size: 10 + aggs: + topics: + terms: + field: topic + size: 10 + + - match: { hits.total.value: 2 } + - length: { hits.hits: 2 } + + - match: { hits.hits.0._id: "doc_3" } + - match: { hits.hits.1._id: "doc_1" } + + - match: { aggregations.topics.buckets.0.key: "geography" } + - match: { aggregations.topics.buckets.0.doc_count: 1 } + - match: { aggregations.topics.buckets.1.key: "science" } + - match: { aggregations.topics.buckets.1.doc_count: 1 } + + +--- +"explain using rrf retriever and text-similarity": + + - do: + search: + index: test-index + body: + track_total_hits: true + fields: [ "text", "topic" ] + retriever: + rrf: { + retrievers: + [ + { + standard: { + query: { + bool: { + should: + [ + { + constant_score: { + filter: { + term: { + integer: 1 + } + }, + boost: 10 + } + }, + { + constant_score: + { + filter: + { + term: + { + integer: 2 + } + }, + boost: 1 + } + } + ] + } + } + } + }, + { + text_similarity_reranker: { + retriever: + { + standard: { + query: { + term: { + topic: "science" + } + } + } + }, + rank_window_size: 10, + inference_id: my-rerank-model, + inference_text: "How often does the moon hide the sun?", + field: text + } + } + ], + rank_window_size: 10, + rank_constant: 1 + } + size: 10 + 
explain: true + + - match: { hits.hits.0._id: "doc_2" } + - match: { hits.hits.1._id: "doc_1" } + - match: { hits.hits.2._id: "doc_3" } + + - close_to: { hits.hits.0._explanation.value: { value: 0.6666667, error: 0.000001 } } + - match: {hits.hits.0._explanation.description: "/rrf.score:.\\[0.6666667\\].*/" } + - match: {hits.hits.0._explanation.details.0.value: 2} + - match: {hits.hits.0._explanation.details.0.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.0.details.0.details.0.description: "/ConstantScore.*/" } + - match: {hits.hits.0._explanation.details.1.value: 2} + - match: {hits.hits.0._explanation.details.1.description: "/rrf.score:.\\[0.33333334\\].*/" } + - match: {hits.hits.0._explanation.details.1.details.0.description: "/text_similarity_reranker.match.using.inference.endpoint:.\\[my-rerank-model\\].on.document.field:.\\[text\\].*/" } + - match: {hits.hits.0._explanation.details.1.details.0.details.0.description: "/weight.*science.*/" } From bb9d612eb61d0d7d27be86bce356f4a64dfc7192 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 8 Oct 2024 08:47:28 +0100 Subject: [PATCH 30/85] Show only committed cluster UUID in `GET /` (#114275) Today we show `Metadata#clusterUUID` in the response to `GET /` regardless of whether this value is committed or not, which means that in theory users may see this value change even if nothing is going wrong. To avoid any doubt about the stability of this cluster UUID, this commit suppresses the cluster UUID in this API response until it is committed. 
--- .../rest/root/TransportMainAction.java | 3 +- .../rest/root/MainActionTests.java | 42 +++++++++++-------- .../cluster/metadata/Metadata.java | 5 +++ 3 files changed, 31 insertions(+), 19 deletions(-) diff --git a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java index 15f23f751144..2598f943ea27 100644 --- a/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java +++ b/modules/rest-root/src/main/java/org/elasticsearch/rest/root/TransportMainAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; @@ -48,7 +49,7 @@ public class TransportMainAction extends TransportAction responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), new MainRequest(), new ActionListener<>() { - @Override - public void onResponse(MainResponse mainResponse) { - responseRef.set(mainResponse); - } + final AtomicBoolean listenerCalled = new AtomicBoolean(); + new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService).doExecute( + mock(Task.class), + new MainRequest(), + ActionTestUtils.assertNoFailureListener(mainResponse -> { + assertNotNull(mainResponse); + assertEquals( + state.metadata().clusterUUIDCommitted() ? 
state.metadata().clusterUUID() : Metadata.UNKNOWN_CLUSTER_UUID, + mainResponse.getClusterUuid() + ); + assertFalse(listenerCalled.getAndSet(true)); + }) + ); - @Override - public void onFailure(Exception e) { - logger.error("unexpected error", e); - } - }); - - assertNotNull(responseRef.get()); + assertTrue(listenerCalled.get()); verify(clusterService, times(1)).state(); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 566571d82c8a..0756080c16d0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -695,6 +695,11 @@ public class Metadata implements Iterable, Diffable, Ch return this.version; } + /** + * @return A UUID which identifies this cluster. Nodes record the UUID of the cluster they first join on disk, and will then refuse to + * join clusters with different UUIDs. Note that when the cluster is forming for the first time this value may not yet be committed, + * and therefore it may change. Check {@link #clusterUUIDCommitted()} to verify that the value is committed if needed. 
+ */ public String clusterUUID() { return this.clusterUUID; } From 4af241b5d62cc36de7b617a2b7e5b6c068c49f34 Mon Sep 17 00:00:00 2001 From: kosabogi <105062005+kosabogi@users.noreply.github.com> Date: Tue, 8 Oct 2024 09:58:18 +0200 Subject: [PATCH 31/85] Adds note on reindexing existing data for semantic_text usage (#113590) * Adds note on reindexing existing data for semantic_text usage * Adds note about full crawl and full sync * Style guide related fix * Update docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --------- Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> --- .../semantic-search-semantic-text.asciidoc | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc index f1bd238a64fb..dbcfbb1b615f 100644 --- a/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-semantic-text.asciidoc @@ -89,6 +89,16 @@ PUT semantic-embeddings It will be used to generate the embeddings based on the input text. Every time you ingest data into the related `semantic_text` field, this endpoint will be used for creating the vector representation of the text. +[NOTE] +==== +If you're using web crawlers or connectors to generate indices, you have to +<> for these indices to +include the `semantic_text` field. Once the mapping is updated, you'll need to run +a full web crawl or a full connector sync. This ensures that all existing +documents are reprocessed and updated with the new semantic embeddings, +enabling semantic search on the updated data. 
+==== + [discrete] [[semantic-text-load-data]] @@ -118,6 +128,13 @@ Create the embeddings from the text by reindexing the data from the `test-data` The data in the `content` field will be reindexed into the `content` semantic text field of the destination index. The reindexed data will be processed by the {infer} endpoint associated with the `content` semantic text field. +[NOTE] +==== +This step uses the reindex API to simulate data ingestion. If you are working with data that has already been indexed, +rather than using the test-data set, reindexing is required to ensure that the data is processed by the {infer} endpoint +and the necessary embeddings are generated. +==== + [source,console] ------------------------------------------------------------ POST _reindex?wait_for_completion=false From c3fd8a7a9a2645a29570955f7cf7a7b6d446f07d Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:19:03 +0200 Subject: [PATCH 32/85] Configure Renovate to auto update chainguard image (#114172) * Adds renovate.json * Only auto update chainguard image --------- Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Rene Groeschke --- renovate.json | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 000000000000..7dde3a9440ed --- /dev/null +++ b/renovate.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "github>elastic/renovate-config:only-chainguard" + ], + "customManagers": [ + { + "description": "Extract Wolfi images from elasticsearch DockerBase configuration", + "customType": "regex", + "fileMatch": [ + "build\\-tools\\-internal\\/src\\/main\\/java\\/org\\/elasticsearch\\/gradle\\/internal\\/DockerBase\\.java$" + ], + "matchStrings": [ + 
"\\s*\"?(?[^\\s:@\"]+)(?::(?[-a-zA-Z0-9.]+))?(?:@(?sha256:[a-zA-Z0-9]+))?\"?" + ], + "currentValueTemplate": "{{#if currentValue}}{{{currentValue}}}{{else}}latest{{/if}}", + "autoReplaceStringTemplate": "\"{{{depName}}}{{#if newValue}}:{{{newValue}}}{{/if}}{{#if newDigest}}@{{{newDigest}}}{{/if}}\"", + "datasourceTemplate": "docker" + } + ] +} From 09d5e7f20f8e218861ec4e80456844cf47b80517 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 8 Oct 2024 10:31:16 +0200 Subject: [PATCH 33/85] Remove dangling AwaitsFix (#113941) The issue referenced seems to be closed, maybe this just wasn't removed properly. --- .../SearchQueryThenFetchAsyncActionTests.java | 481 +++++++++--------- 1 file changed, 252 insertions(+), 229 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 3c4976d9bfa8..b63c88f623e2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -258,11 +258,10 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932") public void testMinimumVersionSameAsNewVersion() throws Exception { var newVersion = VersionInformation.CURRENT; var oldVersion = new VersionInformation( - VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), VersionUtils.getPreviousVersion()), + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); @@ -340,65 +339,69 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { SearchTransportService searchTransportService = new 
SearchTransportService(null, null, null); SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ); - final List responses = new ArrayList<>(); - SearchQueryThenFetchAsyncAction newSearchAsyncAction = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - Collections.singletonMap("_na_", AliasFilter.EMPTY), - Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - new ActionListener<>() { - @Override - public void onFailure(Exception e) { - responses.add(e); - } + try ( + QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( + searchRequest, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + controller, + task::isCancelled, + task.getProgressListener(), + shardsIter.size(), + exc -> {} + ) + ) { + final List responses = new ArrayList<>(); + SearchQueryThenFetchAsyncAction newSearchAsyncAction = new SearchQueryThenFetchAsyncAction( + logger, + null, + searchTransportService, + (clusterAlias, node) -> lookup.get(node), + Collections.singletonMap("_na_", AliasFilter.EMPTY), + Collections.emptyMap(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + resultConsumer, + searchRequest, + new ActionListener<>() { + @Override + public void onFailure(Exception e) { + responses.add(e); + } - public void onResponse(SearchResponse response) { - responses.add(response); - }; - }, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, 
- SearchResponse.Clusters.EMPTY, - null - ); + public void onResponse(SearchResponse response) { + responses.add(response); + } - newSearchAsyncAction.start(); - assertThat(responses, hasSize(1)); - assertThat(responses.get(0), instanceOf(SearchPhaseExecutionException.class)); - SearchPhaseExecutionException e = (SearchPhaseExecutionException) responses.get(0); - assertThat(e.getCause(), instanceOf(VersionMismatchException.class)); - assertThat( - e.getCause().getMessage(), - equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]") - ); + ; + }, + shardsIter, + timeProvider, + new ClusterState.Builder(new ClusterName("test")).build(), + task, + SearchResponse.Clusters.EMPTY, + null + ); + + newSearchAsyncAction.start(); + assertThat(responses, hasSize(1)); + assertThat(responses.get(0), instanceOf(SearchPhaseExecutionException.class)); + SearchPhaseExecutionException e = (SearchPhaseExecutionException) responses.get(0); + assertThat(e.getCause(), instanceOf(VersionMismatchException.class)); + assertThat( + e.getCause().getMessage(), + equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]") + ); + } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932") public void testMinimumVersionSameAsOldVersion() throws Exception { - Version newVersion = Version.CURRENT; - Version oldVersion = VersionUtils.randomVersionBetween( - random(), - Version.CURRENT.minimumCompatibilityVersion(), - VersionUtils.getPreviousVersion(newVersion) + var newVersion = VersionInformation.CURRENT; + var oldVersion = new VersionInformation( + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), + IndexVersions.MINIMUM_COMPATIBLE, + IndexVersionUtils.randomCompatibleVersion(random()) ); - Version minVersion = oldVersion; + Version minVersion = oldVersion.nodeVersion(); final TransportSearchAction.SearchTimeProvider timeProvider = new 
TransportSearchAction.SearchTimeProvider( 0, @@ -456,98 +459,106 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null ); - SortField sortField = new SortField("timestamp", SortField.Type.LONG); - if (shardId == 0) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } + try { + SortField sortField = new SortField("timestamp", SortField.Type.LONG); + if (shardId == 0) { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, + new SortField[] { sortField } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } else if (shardId == 1) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } + new DocValueFormat[] { DocValueFormat.RAW } + ); + } else if (shardId == 1) { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, + new SortField[] { sortField } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); + new DocValueFormat[] { DocValueFormat.RAW } + ); + } + queryResult.from(0); + queryResult.size(1); + successfulOps.incrementAndGet(); + queryResult.incRef(); + new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); + } finally { + 
queryResult.decRef(); } - queryResult.from(0); - queryResult.size(1); - successfulOps.incrementAndGet(); - new Thread(() -> listener.onResponse(queryResult)).start(); } }; SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ); - CountDownLatch latch = new CountDownLatch(1); - SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - Collections.singletonMap("_na_", AliasFilter.EMPTY), - Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - null, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, - SearchResponse.Clusters.EMPTY, - null + try ( + QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( + searchRequest, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + controller, + task::isCancelled, + task.getProgressListener(), + shardsIter.size(), + exc -> {} + ) ) { - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - latch.countDown(); - } - }; - } - }; + CountDownLatch latch = new CountDownLatch(1); + SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( + logger, + null, + searchTransportService, + (clusterAlias, node) -> lookup.get(node), + Collections.singletonMap("_na_", AliasFilter.EMPTY), + 
Collections.emptyMap(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + resultConsumer, + searchRequest, + null, + shardsIter, + timeProvider, + new ClusterState.Builder(new ClusterName("test")).build(), + task, + SearchResponse.Clusters.EMPTY, + null + ) { + @Override + protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase("test") { + @Override + public void run() { + latch.countDown(); + } + }; + } + }; - action.start(); - latch.await(); - assertThat(successfulOps.get(), equalTo(2)); - SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); - assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + action.start(); + latch.await(); + assertThat(successfulOps.get(), equalTo(2)); + SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); + assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); + assertThat(phase.totalHits().value, equalTo(2L)); + assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101932") public void testMinimumVersionShardDuringPhaseExecution() throws Exception { - Version newVersion = Version.CURRENT; - Version oldVersion = VersionUtils.randomVersionBetween( - random(), - Version.CURRENT.minimumCompatibilityVersion(), - VersionUtils.getPreviousVersion(newVersion) + var newVersion = VersionInformation.CURRENT; + var oldVersion = new VersionInformation( + VersionUtils.randomCompatibleVersion(random(), VersionUtils.getPreviousVersion()), + IndexVersions.MINIMUM_COMPATIBLE, + IndexVersionUtils.randomCompatibleVersion(random()) ); - Version minVersion = newVersion; + + Version minVersion = newVersion.nodeVersion(); final TransportSearchAction.SearchTimeProvider timeProvider = new 
TransportSearchAction.SearchTimeProvider( 0, @@ -607,111 +618,123 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null), null ); - SortField sortField = new SortField("timestamp", SortField.Type.LONG); - if (shardId == 0) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } + try { + SortField sortField = new SortField("timestamp", SortField.Type.LONG); + if (shardId == 0) { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, + new SortField[] { sortField } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); - } else if (shardId == 1) { - queryResult.topDocs( - new TopDocsAndMaxScore( - new TopFieldDocs( - new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), - new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, - new SortField[] { sortField } + new DocValueFormat[] { DocValueFormat.RAW } + ); + } else if (shardId == 1) { + queryResult.topDocs( + new TopDocsAndMaxScore( + new TopFieldDocs( + new TotalHits(1, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO), + new FieldDoc[] { new FieldDoc(randomInt(1000), Float.NaN, new Object[] { shardId }) }, + new SortField[] { sortField } + ), + Float.NaN ), - Float.NaN - ), - new DocValueFormat[] { DocValueFormat.RAW } - ); + new DocValueFormat[] { DocValueFormat.RAW } + ); + } + queryResult.from(0); + queryResult.size(1); + successfulOps.incrementAndGet(); + queryResult.incRef(); + new Thread(() -> ActionListener.respondAndRelease(listener, queryResult)).start(); + } finally { + 
queryResult.decRef(); } - queryResult.from(0); - queryResult.size(1); - successfulOps.incrementAndGet(); - new Thread(() -> listener.onResponse(queryResult)).start(); } }; SearchPhaseController controller = new SearchPhaseController((t, r) -> InternalAggregationTestCase.emptyReduceContextBuilder()); SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ); + CountDownLatch latch = new CountDownLatch(1); - SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( - logger, - null, - searchTransportService, - (clusterAlias, node) -> lookup.get(node), - Collections.singletonMap("_na_", AliasFilter.EMPTY), - Collections.emptyMap(), - EsExecutors.DIRECT_EXECUTOR_SERVICE, - resultConsumer, - searchRequest, - null, - shardsIter, - timeProvider, - new ClusterState.Builder(new ClusterName("test")).build(), - task, - SearchResponse.Clusters.EMPTY, - null + try ( + QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( + searchRequest, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + controller, + task::isCancelled, + task.getProgressListener(), + shardsIter.size(), + exc -> {} + ) ) { - @Override - protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { - return new SearchPhase("test") { - @Override - public void run() { - latch.countDown(); - } - }; - } - }; - ShardRouting routingOldVersionShard = ShardRouting.newUnassigned( - new ShardId(new Index("idx", "_na_"), 2), - true, - RecoverySource.EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), - ShardRouting.Role.DEFAULT - ); - 
SearchShardIterator shardIt = new SearchShardIterator( - null, - new ShardId(new Index("idx", "_na_"), 2), - singletonList(routingOldVersionShard), - idx - ); - routingOldVersionShard = routingOldVersionShard.initialize(oldVersionNode.getId(), "p2", 0); - routingOldVersionShard.started(); - action.start(); - latch.await(); - assertThat(successfulOps.get(), equalTo(2)); - SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); - assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( + logger, + null, + searchTransportService, + (clusterAlias, node) -> lookup.get(node), + Collections.singletonMap("_na_", AliasFilter.EMPTY), + Collections.emptyMap(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + resultConsumer, + searchRequest, + null, + shardsIter, + timeProvider, + new ClusterState.Builder(new ClusterName("test")).build(), + task, + SearchResponse.Clusters.EMPTY, + null + ) { + @Override + protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase("test") { + @Override + public void run() { + latch.countDown(); + } + }; + } + }; + ShardRouting routingOldVersionShard = ShardRouting.newUnassigned( + new ShardId(new Index("idx", "_na_"), 2), + true, + RecoverySource.EmptyStoreRecoverySource.INSTANCE, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar"), + ShardRouting.Role.DEFAULT + ); + SearchShardIterator shardIt = new SearchShardIterator( + null, + new ShardId(new Index("idx", "_na_"), 2), + singletonList(routingOldVersionShard), + idx + ); + routingOldVersionShard = routingOldVersionShard.initialize(oldVersionNode.getId(), "p2", 0); + routingOldVersionShard.started(); + action.start(); + latch.await(); + assertThat(successfulOps.get(), equalTo(2)); + 
SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); + assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); + assertThat(phase.totalHits().value, equalTo(2L)); + assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); - SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); - SearchActionListener listener = new SearchActionListener(searchShardTarget, 0) { - @Override - public void onFailure(Exception e) {} + SearchShardTarget searchShardTarget = new SearchShardTarget("node3", shardIt.shardId(), null); + SearchActionListener listener = new SearchActionListener(searchShardTarget, 0) { + @Override + public void onFailure(Exception e) {} - @Override - protected void innerOnResponse(SearchPhaseResult response) {} - }; - Exception e = expectThrows(VersionMismatchException.class, () -> action.executePhaseOnShard(shardIt, searchShardTarget, listener)); - assertThat(e.getMessage(), equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]")); + @Override + protected void innerOnResponse(SearchPhaseResult response) {} + }; + Exception e = expectThrows( + VersionMismatchException.class, + () -> action.executePhaseOnShard(shardIt, searchShardTarget, listener) + ); + assertThat(e.getMessage(), equalTo("One of the shards is incompatible with the required minimum version [" + minVersion + "]")); + } } } From 2a0d5ffc020df11bddcde9cd3b8ae666c4fc6457 Mon Sep 17 00:00:00 2001 From: ChrisHegarty Date: Tue, 8 Oct 2024 09:39:21 +0100 Subject: [PATCH 34/85] Fix simdvec gradle runtime java check --- libs/simdvec/build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index dab5c25b3467..eee56be72d0b 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.publish' @@ -32,7 +33,7 @@ tasks.matching { it.name == "compileMain21Java" }.configureEach { } tasks.named('test').configure { - if (JavaVersion.current().majorVersion.toInteger() >= 21) { + if (BuildParams.getRuntimeJavaVersion().majorVersion.toInteger() >= 21) { jvmArgs '--add-modules=jdk.incubator.vector' } } From 6d6fc66e90da3eddcd9f37d93361b77a6596fed9 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 8 Oct 2024 10:45:35 +0100 Subject: [PATCH 35/85] Delegate xorBitCount to Lucene (#114249) Now that we're on Lucene 9.12 we don't need our own optimized xorBitCount, can just delegate to Lucene's optimized one (which is identical). --- .../vectors/ES815BitFlatVectorsFormat.java | 4 +- .../field/vectors/ByteBinaryDenseVector.java | 2 +- .../field/vectors/ByteKnnDenseVector.java | 2 +- .../script/field/vectors/ESVectorUtil.java | 73 ------------------- 4 files changed, 4 insertions(+), 77 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/script/field/vectors/ESVectorUtil.java diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index 5969c9d5db6d..f0f25bd70274 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -17,11 +17,11 @@ import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.util.VectorUtil; import 
org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; -import org.elasticsearch.script.field.vectors.ESVectorUtil; import java.io.IOException; @@ -105,7 +105,7 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { } static float hammingScore(byte[] a, byte[] b) { - return ((a.length * Byte.SIZE) - ESVectorUtil.xorBitCount(a, b)) / (float) (a.length * Byte.SIZE); + return ((a.length * Byte.SIZE) - VectorUtil.xorBitCount(a, b)) / (float) (a.length * Byte.SIZE); } static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java index a01d1fcbdb4e..8f13ada2fd60 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteBinaryDenseVector.java @@ -103,7 +103,7 @@ public class ByteBinaryDenseVector implements DenseVector { @Override public int hamming(byte[] queryVector) { - return ESVectorUtil.xorBitCount(queryVector, vectorValue); + return VectorUtil.xorBitCount(queryVector, vectorValue); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVector.java index a4219583824c..42e5b5250199 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVector.java @@ -104,7 +104,7 @@ public class ByteKnnDenseVector implements DenseVector { @Override public int hamming(byte[] queryVector) { - return 
ESVectorUtil.xorBitCount(queryVector, docVector); + return VectorUtil.xorBitCount(queryVector, docVector); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ESVectorUtil.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ESVectorUtil.java deleted file mode 100644 index 045a0e5e75b0..000000000000 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ESVectorUtil.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.script.field.vectors; - -import org.apache.lucene.util.BitUtil; -import org.apache.lucene.util.Constants; - -/** - * This class consists of a single utility method that provides XOR bit count computed over signed bytes. - * Remove this class when Lucene version > 9.11 is released, and replace with Lucene's VectorUtil directly. - */ -public class ESVectorUtil { - - /** - * For xorBitCount we stride over the values as either 64-bits (long) or 32-bits (int) at a time. - * On ARM Long::bitCount is not vectorized, and therefore produces less than optimal code, when - * compared to Integer::bitCount. While Long::bitCount is optimal on x64. - */ - static final boolean XOR_BIT_COUNT_STRIDE_AS_INT = Constants.OS_ARCH.equals("aarch64"); - - /** - * XOR bit count computed over signed bytes. 
- * - * @param a bytes containing a vector - * @param b bytes containing another vector, of the same dimension - * @return the value of the XOR bit count of the two vectors - */ - public static int xorBitCount(byte[] a, byte[] b) { - if (a.length != b.length) { - throw new IllegalArgumentException("vector dimensions differ: " + a.length + "!=" + b.length); - } - if (XOR_BIT_COUNT_STRIDE_AS_INT) { - return xorBitCountInt(a, b); - } else { - return xorBitCountLong(a, b); - } - } - - /** XOR bit count striding over 4 bytes at a time. */ - static int xorBitCountInt(byte[] a, byte[] b) { - int distance = 0, i = 0; - for (final int upperBound = a.length & -Integer.BYTES; i < upperBound; i += Integer.BYTES) { - distance += Integer.bitCount((int) BitUtil.VH_NATIVE_INT.get(a, i) ^ (int) BitUtil.VH_NATIVE_INT.get(b, i)); - } - // tail: - for (; i < a.length; i++) { - distance += Integer.bitCount((a[i] ^ b[i]) & 0xFF); - } - return distance; - } - - /** XOR bit count striding over 8 bytes at a time. 
*/ - static int xorBitCountLong(byte[] a, byte[] b) { - int distance = 0, i = 0; - for (final int upperBound = a.length & -Long.BYTES; i < upperBound; i += Long.BYTES) { - distance += Long.bitCount((long) BitUtil.VH_NATIVE_LONG.get(a, i) ^ (long) BitUtil.VH_NATIVE_LONG.get(b, i)); - } - // tail: - for (; i < a.length; i++) { - distance += Integer.bitCount((a[i] ^ b[i]) & 0xFF); - } - return distance; - } - - private ESVectorUtil() {} -} From cbde7f456d7ccd98556302fccf3238bb4557fc91 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 8 Oct 2024 11:42:22 +0100 Subject: [PATCH 36/85] Add a size limit to outputs from mustache (#114002) --- docs/changelog/114002.yaml | 5 ++ .../script/ScriptServiceBridge.java | 2 +- .../script/mustache/MustachePlugin.java | 2 +- .../script/mustache/MustacheScriptEngine.java | 27 +++++++- .../mustache/CustomMustacheFactoryTests.java | 7 +- .../mustache/MustacheScriptEngineTests.java | 28 +++++++- .../script/mustache/MustacheTests.java | 3 +- .../ingest/AbstractScriptTestCase.java | 2 +- .../common/text/SizeLimitingStringWriter.java | 69 +++++++++++++++++++ .../text/SizeLimitingStringWriterTests.java | 29 ++++++++ .../support/mapper/TemplateRoleNameTests.java | 14 ++-- .../SecurityQueryTemplateEvaluatorTests.java | 3 +- .../WildcardServiceProviderResolverTests.java | 2 +- .../ltr/LearningToRankServiceTests.java | 2 +- .../authc/ldap/ActiveDirectoryRealmTests.java | 2 +- .../security/authc/ldap/LdapRealmTests.java | 2 +- .../mapper/ClusterStateRoleMapperTests.java | 2 +- .../mapper/NativeRoleMappingStoreTests.java | 2 +- .../watcher/support/WatcherTemplateTests.java | 2 +- 19 files changed, 181 insertions(+), 24 deletions(-) create mode 100644 docs/changelog/114002.yaml create mode 100644 server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java create mode 100644 server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java diff --git a/docs/changelog/114002.yaml 
b/docs/changelog/114002.yaml new file mode 100644 index 000000000000..b6bc7e25bcde --- /dev/null +++ b/docs/changelog/114002.yaml @@ -0,0 +1,5 @@ +pr: 114002 +summary: Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts +area: Infra/Scripting +type: enhancement +issues: [] diff --git a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/script/ScriptServiceBridge.java b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/script/ScriptServiceBridge.java index caf6e87c1a53..1f7a19e33330 100644 --- a/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/script/ScriptServiceBridge.java +++ b/libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/script/ScriptServiceBridge.java @@ -53,7 +53,7 @@ public class ScriptServiceBridge extends StableBridgeAPI.Proxy im PainlessScriptEngine.NAME, new PainlessScriptEngine(settings, scriptContexts), MustacheScriptEngine.NAME, - new MustacheScriptEngine() + new MustacheScriptEngine(settings) ); return new ScriptService(settings, scriptEngines, ScriptModule.CORE_CONTEXTS, timeProvider); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index 64bc2799c24d..b24d60cb8d88 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -44,7 +44,7 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin @Override public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new MustacheScriptEngine(); + return new MustacheScriptEngine(settings); } @Override diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java index ca06a853b1ed..e7b172779151 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustacheScriptEngine.java @@ -14,6 +14,13 @@ import com.github.mustachejava.MustacheFactory; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.SizeLimitingStringWriter; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.MemorySizeValue; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -47,6 +54,19 @@ public final class MustacheScriptEngine implements ScriptEngine { public static final String NAME = "mustache"; + public static final Setting MUSTACHE_RESULT_SIZE_LIMIT = new Setting<>( + "mustache.max_output_size_bytes", + s -> "1mb", + s -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, "mustache.max_output_size_bytes"), + Setting.Property.NodeScope + ); + + private final int sizeLimit; + + public MustacheScriptEngine(Settings settings) { + sizeLimit = (int) MUSTACHE_RESULT_SIZE_LIMIT.get(settings).getBytes(); + } + /** * Compile a template string to (in this case) a Mustache object than can * later be re-used for execution to fill in missing parameter values. 
@@ -118,10 +138,15 @@ public final class MustacheScriptEngine implements ScriptEngine { @Override public String execute() { - final StringWriter writer = new StringWriter(); + StringWriter writer = new SizeLimitingStringWriter(sizeLimit); try { template.execute(writer, params); } catch (Exception e) { + // size limit exception can appear at several places in the causal list depending on script & context + if (ExceptionsHelper.unwrap(e, SizeLimitingStringWriter.SizeLimitExceededException.class) != null) { + // don't log, client problem + throw new ElasticsearchParseException("Mustache script result size limit exceeded", e); + } if (shouldLogException(e)) { logger.error(() -> format("Error running %s", template), e); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java index 014d6854121b..eb9c1a6dc303 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/CustomMustacheFactoryTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.script.mustache; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.TemplateScript; @@ -65,7 +66,7 @@ public class CustomMustacheFactoryTests extends ESTestCase { } public void testJsonEscapeEncoder() { - final ScriptEngine engine = new MustacheScriptEngine(); + final ScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); final Map params = randomBoolean() ? 
Map.of(Script.CONTENT_TYPE_OPTION, JSON_MEDIA_TYPE) : Map.of(); TemplateScript.Factory compiled = engine.compile(null, "{\"field\": \"{{value}}\"}", TemplateScript.CONTEXT, params); @@ -75,7 +76,7 @@ public class CustomMustacheFactoryTests extends ESTestCase { } public void testDefaultEncoder() { - final ScriptEngine engine = new MustacheScriptEngine(); + final ScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); final Map params = Map.of(Script.CONTENT_TYPE_OPTION, PLAIN_TEXT_MEDIA_TYPE); TemplateScript.Factory compiled = engine.compile(null, "{\"field\": \"{{value}}\"}", TemplateScript.CONTEXT, params); @@ -85,7 +86,7 @@ public class CustomMustacheFactoryTests extends ESTestCase { } public void testUrlEncoder() { - final ScriptEngine engine = new MustacheScriptEngine(); + final ScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); final Map params = Map.of(Script.CONTENT_TYPE_OPTION, X_WWW_FORM_URLENCODED_MEDIA_TYPE); TemplateScript.Factory compiled = engine.compile(null, "{\"field\": \"{{value}}\"}", TemplateScript.CONTEXT, params); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 089a154079a8..bc1cd30ad45b 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -8,8 +8,13 @@ */ package org.elasticsearch.script.mustache; +import com.github.mustachejava.MustacheException; import com.github.mustachejava.MustacheFactory; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.SizeLimitingStringWriter; import org.elasticsearch.script.GeneralScriptException; import 
org.elasticsearch.script.Script; import org.elasticsearch.script.TemplateScript; @@ -24,6 +29,9 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.startsWith; @@ -37,7 +45,7 @@ public class MustacheScriptEngineTests extends ESTestCase { @Before public void setup() { - qe = new MustacheScriptEngine(); + qe = new MustacheScriptEngine(Settings.builder().put(MustacheScriptEngine.MUSTACHE_RESULT_SIZE_LIMIT.getKey(), "1kb").build()); factory = CustomMustacheFactory.builder().build(); } @@ -402,6 +410,24 @@ public class MustacheScriptEngineTests extends ESTestCase { } } + public void testResultSizeLimit() throws IOException { + String vals = "\"" + "{{val}}".repeat(200) + "\""; + String params = "\"val\":\"aaaaaaaaaa\""; + XContentParser parser = createParser(JsonXContent.jsonXContent, Strings.format("{\"source\":%s,\"params\":{%s}}", vals, params)); + Script script = Script.parse(parser); + var compiled = qe.compile(null, script.getIdOrCode(), TemplateScript.CONTEXT, Map.of()); + TemplateScript templateScript = compiled.newInstance(script.getParams()); + var ex = expectThrows(ElasticsearchParseException.class, templateScript::execute); + assertThat(ex.getCause(), instanceOf(MustacheException.class)); + assertThat( + ex.getCause().getCause(), + allOf( + instanceOf(SizeLimitingStringWriter.SizeLimitExceededException.class), + transformedMatch(Throwable::getMessage, endsWith("has exceeded the size limit [1024]")) + ) + ); + } + private String getChars() { String string = randomRealisticUnicodeOfCodepointLengthBetween(0, 10); for (int i = 0; i < string.length(); i++) { diff --git 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 82c4637f600f..335cfe91df87 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; import org.elasticsearch.script.ScriptEngine; @@ -39,7 +40,7 @@ import static org.hamcrest.Matchers.not; public class MustacheTests extends ESTestCase { - private ScriptEngine engine = new MustacheScriptEngine(); + private ScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); public void testBasics() { String template = """ diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/AbstractScriptTestCase.java b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/AbstractScriptTestCase.java index ff6e75bed047..8d877bd48c1e 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/AbstractScriptTestCase.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/AbstractScriptTestCase.java @@ -31,7 +31,7 @@ public abstract class AbstractScriptTestCase extends ESTestCase { @Before public void init() throws Exception { - MustacheScriptEngine engine = new MustacheScriptEngine(); + MustacheScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); Map engines = Collections.singletonMap(engine.getType(), engine); scriptService = new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS, () -> 1L); } diff --git 
a/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java new file mode 100644 index 000000000000..2df7e6537c60 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/text/SizeLimitingStringWriter.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common.text; + +import org.elasticsearch.common.Strings; + +import java.io.StringWriter; + +/** + * A {@link StringWriter} that throws an exception if the string exceeds a specified size. + */ +public class SizeLimitingStringWriter extends StringWriter { + + public static class SizeLimitExceededException extends IllegalStateException { + public SizeLimitExceededException(String message) { + super(message); + } + } + + private final int sizeLimit; + + public SizeLimitingStringWriter(int sizeLimit) { + this.sizeLimit = sizeLimit; + } + + private void checkSizeLimit(int additionalChars) { + int bufLen = getBuffer().length(); + if (bufLen + additionalChars > sizeLimit) { + throw new SizeLimitExceededException( + Strings.format("String [%s...] 
has exceeded the size limit [%s]", getBuffer().substring(0, Math.min(bufLen, 20)), sizeLimit) + ); + } + } + + @Override + public void write(int c) { + checkSizeLimit(1); + super.write(c); + } + + // write(char[]) delegates to write(char[], int, int) + + @Override + public void write(char[] cbuf, int off, int len) { + checkSizeLimit(len); + super.write(cbuf, off, len); + } + + @Override + public void write(String str) { + checkSizeLimit(str.length()); + super.write(str); + } + + @Override + public void write(String str, int off, int len) { + checkSizeLimit(len); + super.write(str, off, len); + } + + // append(...) delegates to write(...) methods +} diff --git a/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java new file mode 100644 index 000000000000..32a8de20df9a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/text/SizeLimitingStringWriterTests.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.common.text; + +import org.elasticsearch.test.ESTestCase; + +public class SizeLimitingStringWriterTests extends ESTestCase { + public void testSizeIsLimited() { + SizeLimitingStringWriter writer = new SizeLimitingStringWriter(10); + + writer.write("a".repeat(10)); + + // test all the methods + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write('a')); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write("a")); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write(new char[1])); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.write(new char[1], 0, 1)); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append('a')); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a")); + expectThrows(SizeLimitingStringWriter.SizeLimitExceededException.class, () -> writer.append("a", 0, 1)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleNameTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleNameTests.java index 195e12666248..05044303561d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleNameTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleNameTests.java @@ -89,7 +89,7 @@ public class TemplateRoleNameTests extends ESTestCase { public void testEvaluateRoles() throws Exception { final ScriptService scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), 
ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -145,7 +145,7 @@ public class TemplateRoleNameTests extends ESTestCase { public void testValidate() { final ScriptService scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -173,7 +173,7 @@ public class TemplateRoleNameTests extends ESTestCase { public void testValidateWillPassWithEmptyContext() { final ScriptService scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -204,7 +204,7 @@ public class TemplateRoleNameTests extends ESTestCase { public void testValidateWillFailForSyntaxError() { final ScriptService scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -268,7 +268,7 @@ public class TemplateRoleNameTests extends ESTestCase { final Settings settings = Settings.builder().put("script.allowed_types", ScriptService.ALLOW_NONE).build(); final ScriptService scriptService = new ScriptService( settings, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -285,7 +285,7 @@ public class TemplateRoleNameTests extends ESTestCase { final Settings settings = Settings.builder().put("script.allowed_types", ScriptService.ALLOW_NONE).build(); final ScriptService scriptService = new ScriptService( settings, - 
Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); @@ -314,7 +314,7 @@ public class TemplateRoleNameTests extends ESTestCase { public void testValidateWillFailWhenStoredScriptIsNotFound() { final ScriptService scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluatorTests.java index fed11c75715b..58ae99df60bc 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluatorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluatorTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.security.authz.support; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -94,7 +95,7 @@ public class SecurityQueryTemplateEvaluatorTests extends ESTestCase { true ); - final MustacheScriptEngine mustache = new MustacheScriptEngine(); + final MustacheScriptEngine mustache = new MustacheScriptEngine(Settings.EMPTY); when(scriptService.compile(any(Script.class), eq(TemplateScript.CONTEXT))).thenAnswer(inv -> { assertThat(inv.getArguments(), arrayWithSize(2)); diff --git 
a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/sp/WildcardServiceProviderResolverTests.java b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/sp/WildcardServiceProviderResolverTests.java index 70e5325878c0..832a6e8163ac 100644 --- a/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/sp/WildcardServiceProviderResolverTests.java +++ b/x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/sp/WildcardServiceProviderResolverTests.java @@ -95,7 +95,7 @@ public class WildcardServiceProviderResolverTests extends IdpSamlTestCase { final Settings settings = Settings.EMPTY; final ScriptService scriptService = new ScriptService( settings, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java index 6ca9ae429678..46e54ff3f8c3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankServiceTests.java @@ -241,7 +241,7 @@ public class LearningToRankServiceTests extends ESTestCase { } private ScriptService getTestScriptService() { - ScriptEngine scriptEngine = new MustacheScriptEngine(); + ScriptEngine scriptEngine = new MustacheScriptEngine(Settings.EMPTY); return new ScriptService(Settings.EMPTY, Map.of(DEFAULT_TEMPLATE_LANG, scriptEngine), ScriptModule.CORE_CONTEXTS, () -> 1L); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index b0821864aacc..e72bbd77697c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -434,7 +434,7 @@ public class ActiveDirectoryRealmTests extends ESTestCase { final ScriptService scriptService = new ScriptService( settings, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java index 7083d1301a3e..83e2ff88f9dc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmTests.java @@ -533,7 +533,7 @@ public class LdapRealmTests extends LdapTestCase { final ScriptService scriptService = new ScriptService( defaultGlobalSettings, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java index 515b5ef741a0..063245e00447 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java @@ -51,7 +51,7 @@ public class ClusterStateRoleMapperTests extends ESTestCase { public void setup() { scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 2a084bacfaf7..38f01d4d18bc 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -87,7 +87,7 @@ public class NativeRoleMappingStoreTests extends ESTestCase { public void setup() { scriptService = new ScriptService( Settings.EMPTY, - Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine()), + Collections.singletonMap(MustacheScriptEngine.NAME, new MustacheScriptEngine(Settings.EMPTY)), ScriptModule.CORE_CONTEXTS, () -> 1L ); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherTemplateTests.java index 5ddff34a0ac4..3018afbe9733 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherTemplateTests.java +++ 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/WatcherTemplateTests.java @@ -37,7 +37,7 @@ public class WatcherTemplateTests extends ESTestCase { @Before public void init() throws Exception { - MustacheScriptEngine engine = new MustacheScriptEngine(); + MustacheScriptEngine engine = new MustacheScriptEngine(Settings.EMPTY); Map engines = Collections.singletonMap(engine.getType(), engine); Map> contexts = Collections.singletonMap( Watcher.SCRIPT_TEMPLATE_CONTEXT.name, From 436d4cc877a33f945ae3f4200efef3e76802623e Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 8 Oct 2024 11:44:31 +0100 Subject: [PATCH 37/85] Remove `SearchableSnapshotIndexMetadataUpgrader` (#114290) This service only exists to fix up the metadata on 7.x searchable snapshot indices during/after an upgrade to 8.x, so we don't need it in 9.x. --- .../SearchableSnapshots.java | 2 - ...archableSnapshotIndexMetadataUpgrader.java | 140 ------------ ...bleSnapshotIndexMetadataUpgraderTests.java | 205 ------------------ 3 files changed, 347 deletions(-) delete mode 100644 x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java delete mode 100644 x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index 4eea006b4c2f..5ac8cdb43aa3 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -108,7 +108,6 @@ import 
org.elasticsearch.xpack.searchablesnapshots.rest.RestMountSearchableSnaps import org.elasticsearch.xpack.searchablesnapshots.rest.RestSearchableSnapshotsNodeCachesStatsAction; import org.elasticsearch.xpack.searchablesnapshots.rest.RestSearchableSnapshotsStatsAction; import org.elasticsearch.xpack.searchablesnapshots.store.SearchableSnapshotDirectory; -import org.elasticsearch.xpack.searchablesnapshots.upgrade.SearchableSnapshotIndexMetadataUpgrader; import java.io.IOException; import java.io.UncheckedIOException; @@ -359,7 +358,6 @@ public class SearchableSnapshots extends Plugin implements IndexStorePlugin, Eng components.add(new FrozenCacheServiceSupplier(frozenCacheService.get())); components.add(new CacheServiceSupplier(cacheService.get())); if (DiscoveryNode.isMasterNode(settings)) { - new SearchableSnapshotIndexMetadataUpgrader(clusterService, threadPool).initialize(); clusterService.addListener(new RepositoryUuidWatcher(services.rerouteService())); } return Collections.unmodifiableList(components); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java deleted file mode 100644 index ccdad61adee5..000000000000 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.searchablesnapshots.upgrade; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.ClusterStateUpdateTask; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.indices.ShardLimitValidator; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.concurrent.Executor; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * This class upgrades frozen indices to apply the index.shard_limit.group=frozen setting after all nodes have been upgraded to 7.13+ - */ -public class SearchableSnapshotIndexMetadataUpgrader { - private static final Logger logger = LogManager.getLogger(SearchableSnapshotIndexMetadataUpgrader.class); - - private final ClusterService clusterService; - private final ThreadPool threadPool; - private final AtomicBoolean upgraded = new AtomicBoolean(); - private final ClusterStateListener listener = this::clusterChanged; - - public SearchableSnapshotIndexMetadataUpgrader(ClusterService clusterService, ThreadPool threadPool) { - this.clusterService = clusterService; - this.threadPool = threadPool; - } - - public void initialize() { - clusterService.addListener(listener); - } - - private void clusterChanged(ClusterChangedEvent event) { - if (upgraded.get()) { - return; - } - - if (event.localNodeMaster()) { - // only want one doing this at a time, assume it succeeds and reset if not. 
- if (upgraded.compareAndSet(false, true)) { - final Executor executor = threadPool.generic(); - executor.execute(() -> maybeUpgradeIndices(event.state())); - } - } - } - - private void maybeUpgradeIndices(ClusterState state) { - // 99% of the time, this will be a noop, so precheck that before adding a cluster state update. - if (needsUpgrade(state)) { - logger.info("Upgrading partial searchable snapshots to use frozen shard limit group"); - submitUnbatchedTask("searchable-snapshot-index-upgrader", new ClusterStateUpdateTask() { - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - return upgradeIndices(currentState); - } - - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - clusterService.removeListener(listener); - } - - @Override - public void onFailure(Exception e) { - logger.warn( - "upgrading frozen indices to have frozen shard limit group failed, will retry on the next cluster state update", - e - ); - // let us try again later. 
- upgraded.set(false); - } - }); - } else { - clusterService.removeListener(listener); - } - } - - @SuppressForbidden(reason = "legacy usage of unbatched task") // TODO add support for batching here - private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String source, ClusterStateUpdateTask task) { - clusterService.submitUnbatchedStateUpdateTask(source, task); - } - - static boolean needsUpgrade(ClusterState state) { - return state.metadata() - .stream() - .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) - ) - .filter(IndexMetadata::isPartialSearchableSnapshot) - .map(IndexMetadata::getSettings) - .anyMatch(SearchableSnapshotIndexMetadataUpgrader::notFrozenShardLimitGroup); - } - - static ClusterState upgradeIndices(ClusterState currentState) { - if (needsUpgrade(currentState) == false) { - return currentState; - } - Metadata.Builder builder = Metadata.builder(currentState.metadata()); - currentState.metadata() - .stream() - .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) - ) - .filter(imd -> imd.isPartialSearchableSnapshot() && notFrozenShardLimitGroup(imd.getSettings())) - .map(SearchableSnapshotIndexMetadataUpgrader::setShardLimitGroupFrozen) - .forEach(imd -> builder.put(imd, true)); - return ClusterState.builder(currentState).metadata(builder).build(); - } - - private static boolean notFrozenShardLimitGroup(org.elasticsearch.common.settings.Settings settings) { - return ShardLimitValidator.FROZEN_GROUP.equals(ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP.get(settings)) == false; - } - - private static IndexMetadata setShardLimitGroupFrozen(IndexMetadata indexMetadata) { - return IndexMetadata.builder(indexMetadata) - .settings( - Settings.builder() - .put(indexMetadata.getSettings()) - 
.put(ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP.getKey(), ShardLimitValidator.FROZEN_GROUP) - ) - .settingsVersion(indexMetadata.getSettingsVersion() + 1) - .build(); - } -} diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java deleted file mode 100644 index 8de5a3710284..000000000000 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ /dev/null @@ -1,205 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.searchablesnapshots.upgrade; - -import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.UpdateForV9; -import org.elasticsearch.index.IndexModule; -import org.elasticsearch.index.IndexVersion; -import org.elasticsearch.index.IndexVersions; -import org.elasticsearch.indices.ShardLimitValidator; -import org.elasticsearch.snapshots.SearchableSnapshotsSettings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.index.IndexVersionUtils; - -import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static 
org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.sameInstance; - -@UpdateForV9(owner = UpdateForV9.Owner.DISTRIBUTED_COORDINATION) -@LuceneTestCase.AwaitsFix(bugUrl = "this testing a number of pre 8.0 upgrade scenarios so needs updating or removal for 9.0") -public class SearchableSnapshotIndexMetadataUpgraderTests extends ESTestCase { - - public void testNoUpgradeNeeded() { - Metadata.Builder metadataBuilder = randomMetadata( - normal(), - full(), - partial_8plusNoShardLimit(), - shardLimitGroupFrozen(partial_7_13plus()), - shardLimitGroupFrozen(partialNeedsUpgrade()) - ); - assertThat(needsUpgrade(metadataBuilder), is(false)); - } - - public void testNeedsUpgrade() { - assertThat( - needsUpgrade( - addIndex( - partialNeedsUpgrade(), - randomMetadata( - normal(), - full(), - partial_7_13plus(), - partialNeedsUpgrade(), - shardLimitGroupFrozen(partialNeedsUpgrade()) - ) - ) - ), - is(true) - ); - } - - public void testUpgradeIndices() { - Metadata.Builder metadataBuilder = addIndex( - partialNeedsUpgrade(), - randomMetadata(normal(), full(), partial_7_13plus(), partialNeedsUpgrade(), shardLimitGroupFrozen(partialNeedsUpgrade())) - ); - - ClusterState originalState = clusterState(metadataBuilder); - ClusterState upgradedState = SearchableSnapshotIndexMetadataUpgrader.upgradeIndices(originalState); - - assertThat(upgradedState, not(sameInstance(originalState))); - assertThat(upgradedState.metadata().indices().size(), equalTo(originalState.metadata().indices().size())); - - assertTrue(upgradedState.metadata().stream().anyMatch(upgraded -> { - IndexMetadata original = originalState.metadata().index(upgraded.getIndex()); - assertThat(original, notNullValue()); - if (upgraded.isPartialSearchableSnapshot() == false - || ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP.get(original.getSettings()) - .equals(ShardLimitValidator.FROZEN_GROUP)) { - assertThat(upgraded, sameInstance(original)); - return false; 
- } else { - assertThat(upgraded.isPartialSearchableSnapshot(), is(original.isPartialSearchableSnapshot())); - assertThat(upgraded.getNumberOfShards(), equalTo(original.getNumberOfShards())); - assertThat(upgraded.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas())); - assertThat( - ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP.get(upgraded.getSettings()), - equalTo(ShardLimitValidator.FROZEN_GROUP) - ); - assertThat(upgraded.getSettingsVersion(), equalTo(original.getSettingsVersion() + 1)); - return true; - } - })); - - assertThat(SearchableSnapshotIndexMetadataUpgrader.needsUpgrade(upgradedState), is(false)); - } - - public void testNoopUpgrade() { - Metadata.Builder metadataBuilder = randomMetadata( - normal(), - full(), - partial_7_13plus(), - shardLimitGroupFrozen(partialNeedsUpgrade()), - partial_8plusNoShardLimit() - ); - ClusterState originalState = clusterState(metadataBuilder); - ClusterState upgradedState = SearchableSnapshotIndexMetadataUpgrader.upgradeIndices(originalState); - assertThat(upgradedState, sameInstance(originalState)); - } - - private Settings normal() { - return settings(IndexVersionUtils.randomVersion(random())).build(); - } - - /** - * Simulate an index mounted with no shard limit group. Notice that due to not applying the group during rolling upgrades, we can see - * other than 7.12 versions here, but not 8.0 (since a rolling upgrade to 8.0 requires an upgrade to 7.latest first). - */ - private Settings partialNeedsUpgrade() { - return searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween( - random(), - IndexVersions.V_7_12_0, - IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) - ), - true - ); - } - - /** - * Simulate a 7.13plus mounted index with shard limit. 
- */ - private Settings partial_7_13plus() { - return shardLimitGroupFrozen( - searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_13_0, IndexVersion.current()), - true - ) - ); - } - - /** - * This is an illegal state, but we simulate it to capture that we do the version check - */ - private Settings partial_8plusNoShardLimit() { - return searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), - true - ); - } - - private Settings full() { - return searchableSnapshotSettings(IndexVersionUtils.randomVersion(random()), false); - } - - private Settings searchableSnapshotSettings(IndexVersion version, boolean partial) { - Settings.Builder settings = settings(version); - settings.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SEARCHABLE_SNAPSHOT_STORE_TYPE); - if (partial || randomBoolean()) { - settings.put(SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING.getKey(), partial); - } - return settings.build(); - } - - private Settings shardLimitGroupFrozen(Settings settings) { - return Settings.builder() - .put(settings) - .put(ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP.getKey(), ShardLimitValidator.FROZEN_GROUP) - .build(); - } - - private Metadata.Builder addIndex(Settings settings, Metadata.Builder builder) { - builder.put( - IndexMetadata.builder(randomAlphaOfLength(10)) - .settings(settings) - .numberOfShards(between(1, 10)) - .numberOfReplicas(between(0, 10)) - .build(), - false - ); - return builder; - } - - private Metadata.Builder randomMetadata(Settings... 
indexSettingsList) { - Metadata.Builder builder = new Metadata.Builder(); - for (Settings settings : indexSettingsList) { - for (int i = 0; i < between(0, 10); ++i) { - addIndex(settings, builder); - } - } - return builder; - } - - private boolean needsUpgrade(Metadata.Builder metadataBuilder) { - return SearchableSnapshotIndexMetadataUpgrader.needsUpgrade(clusterState(metadataBuilder)); - } - - private ClusterState clusterState(Metadata.Builder metadataBuilder) { - return ClusterState.builder(ClusterName.DEFAULT).metadata(metadataBuilder).build(); - } - -} From ce07060dce69f961c0906079529e91c7dd7d4b48 Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Tue, 8 Oct 2024 13:09:39 +0200 Subject: [PATCH 38/85] Remove role mapping block check (#114223) This method is used in multiple contexts that may not all handle cluster-block exceptions gracefully, esp. since some types of cluster blocks are retryable. Removing this, and may follow up with handling the cluster state block for the affected transport actions instead. 
--- .../xpack/core/security/authz/RoleMappingMetadata.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java index 8f78fdbccd92..da6ff6ad24c3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleMappingMetadata.java @@ -12,7 +12,6 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; -import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; @@ -58,7 +57,6 @@ public final class RoleMappingMetadata extends AbstractNamedDiffable Date: Tue, 8 Oct 2024 22:16:13 +1100 Subject: [PATCH 39/85] Mute org.elasticsearch.index.SearchSlowLogTests testLevelPrecedence #114300 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 93893d7103af..81ec1c414b0c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -370,6 +370,9 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/114188 - class: org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests issue: https://github.com/elastic/elasticsearch/issues/114266 +- class: org.elasticsearch.index.SearchSlowLogTests + method: testLevelPrecedence + issue: https://github.com/elastic/elasticsearch/issues/114300 # Examples: # From ffea594158ed39afd62d94c88334cca5992632c7 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Tue, 8 Oct 2024 22:16:31 +1100 Subject: [PATCH 
40/85] Mute org.elasticsearch.index.SearchSlowLogTests testTwoLoggersDifferentLevel #114301 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 81ec1c414b0c..d6d5d47af3a7 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -373,6 +373,9 @@ tests: - class: org.elasticsearch.index.SearchSlowLogTests method: testLevelPrecedence issue: https://github.com/elastic/elasticsearch/issues/114300 +- class: org.elasticsearch.index.SearchSlowLogTests + method: testTwoLoggersDifferentLevel + issue: https://github.com/elastic/elasticsearch/issues/114301 # Examples: # From b80272a1a4d457ba0a950c5f01975c8b280da58c Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 8 Oct 2024 13:51:02 +0200 Subject: [PATCH 41/85] [DOCS] Update URL (#114292) --- docs/reference/intro.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/intro.asciidoc b/docs/reference/intro.asciidoc index 21b20a7e2f5d..2908c55789ba 100644 --- a/docs/reference/intro.asciidoc +++ b/docs/reference/intro.asciidoc @@ -204,7 +204,7 @@ For general content, you have the following options for adding data to {es} indi If you're building a website or app, then you can call Elasticsearch APIs using an https://www.elastic.co/guide/en/elasticsearch/client/index.html[{es} client] in the programming language of your choice. If you use the Python client, then check out the `elasticsearch-labs` repo for various https://github.com/elastic/elasticsearch-labs/tree/main/notebooks/search/python-examples[example notebooks]. * {kibana-ref}/connect-to-elasticsearch.html#upload-data-kibana[File upload]: Use the {kib} file uploader to index single files for one-off testing and exploration. The GUI guides you through setting up your index and field mappings. * https://github.com/elastic/crawler[Web crawler]: Extract and index web page content into {es} documents. 
-* {enterprise-search-ref}/connectors.html[Connectors]: Sync data from various third-party data sources to create searchable, read-only replicas in {es}. +* <>: Sync data from various third-party data sources to create searchable, read-only replicas in {es}. [discrete] [[es-ingestion-overview-timestamped]] @@ -492,4 +492,4 @@ and restrictions. You can review the following guides to learn how to tune your * <> Many {es} options come with different performance considerations and trade-offs. The best way to determine the -optimal configuration for your use case is through https://www.elastic.co/elasticon/conf/2016/sf/quantitative-cluster-sizing[testing with your own data and queries]. \ No newline at end of file +optimal configuration for your use case is through https://www.elastic.co/elasticon/conf/2016/sf/quantitative-cluster-sizing[testing with your own data and queries]. From 10f6f255066590f9425081249a59ef72d16b1d8d Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:08:47 +0200 Subject: [PATCH 42/85] [DOCS] Update re-ranking intro to remove confusion about stages (#114302) --- docs/reference/reranking/index.asciidoc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/reference/reranking/index.asciidoc b/docs/reference/reranking/index.asciidoc index cc6f4a900742..3171be7c872d 100644 --- a/docs/reference/reranking/index.asciidoc +++ b/docs/reference/reranking/index.asciidoc @@ -1,12 +1,12 @@ [[re-ranking-overview]] = Re-ranking -Many search systems are built on two-stage retrieval pipelines. +Many search systems are built on multi-stage retrieval pipelines. -The first stage uses cheap, fast algorithms to find a broad set of possible matches. +Earlier stages use cheap, fast algorithms to find a broad set of possible matches. -The second stage uses a more powerful model, often machine learning-based, to reorder the documents. -This second step is called re-ranking. 
+Later stages use more powerful models, often machine learning-based, to reorder the documents. +This step is called re-ranking. Because the resource-intensive model is only applied to the smaller set of pre-filtered results, this approach returns more relevant results while still optimizing for search performance and computational costs. {es} supports various ranking and re-ranking techniques to optimize search relevance and performance. @@ -18,7 +18,7 @@ Because the resource-intensive model is only applied to the smaller set of pre-f [float] [[re-ranking-first-stage-pipeline]] -=== First stage: initial retrieval +=== Initial retrieval [float] [[re-ranking-ranking-overview-bm25]] @@ -45,7 +45,7 @@ Hybrid search techniques combine results from full-text and vector search pipeli [float] [[re-ranking-overview-second-stage]] -=== Second stage: Re-ranking +=== Re-ranking When using the following advanced re-ranking pipelines, first-stage retrieval mechanisms effectively generate a set of candidates. These candidates are funneled into the re-ranker to perform more computationally expensive re-ranking tasks. @@ -67,4 +67,4 @@ Learning To Rank involves training a machine learning model to build a ranking f LTR is best suited for when you have ample training data and need highly customized relevance tuning. include::semantic-reranking.asciidoc[] -include::learning-to-rank.asciidoc[] \ No newline at end of file +include::learning-to-rank.asciidoc[] From 6f518d437f528d794715db049a3b0f155e5317f9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 8 Oct 2024 14:14:35 +0100 Subject: [PATCH 43/85] Remove unnecessary test overrides (#114291) These test overrides were introduced so that we had somewhere to hang an `@AwaitsFix` annotation, but now the tests are unmuted again there's no need for the overrides. 
Relates #108336 --- .../AzureStorageCleanupThirdPartyTests.java | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java index 7d280f31ecf1..abd4f506a0bb 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureStorageCleanupThirdPartyTests.java @@ -60,31 +60,6 @@ public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyReposi AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_ACCOUNT) ); - @Override - public void testCreateSnapshot() { - super.testCreateSnapshot(); - } - - @Override - public void testIndexLatest() throws Exception { - super.testIndexLatest(); - } - - @Override - public void testListChildren() { - super.testListChildren(); - } - - @Override - public void testCleanup() throws Exception { - super.testCleanup(); - } - - @Override - public void testReadFromPositionWithLength() { - super.testReadFromPositionWithLength(); - } - @Override protected Collection> getPlugins() { return pluginList(AzureRepositoryPlugin.class); From d9dc165db24a9a99664fbde49ac8b321f414b918 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 8 Oct 2024 14:32:57 +0100 Subject: [PATCH 44/85] Further conversions to ChunkedXContentBuilder (#114237) --- .../reroute/ClusterRerouteResponse.java | 29 ++--- .../elasticsearch/cluster/ClusterState.java | 6 +- .../xcontent/ChunkedToXContentBuilder.java | 20 +++ .../xcontent/ChunkedToXContentHelper.java | 16 --- .../script/ScriptCacheStats.java | 57 +++------ .../reroute/ClusterRerouteResponseTests.java | 14 ++- .../ChunkedToXContentBuilderTests.java | 88 
+++++++++++++ .../ChunkedToXContentHelperTests.java | 119 ------------------ .../xpack/esql/action/EsqlQueryResponse.java | 77 ++++-------- .../esql/action/EsqlQueryResponseTests.java | 8 +- .../shutdown/SingleNodeShutdownStatus.java | 51 ++++---- 11 files changed, 203 insertions(+), 282 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java delete mode 100644 server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelperTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java index b0ec0968f8d1..7b344a4c25a1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponse.java @@ -13,12 +13,11 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.master.IsAcknowledgedSupplier; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.RoutingExplanations; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.core.UpdateForV10; @@ -26,7 +25,6 @@ import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.xcontent.ToXContent; import java.io.IOException; -import java.util.Collections; import 
java.util.Iterator; import java.util.Objects; @@ -98,20 +96,15 @@ public class ClusterRerouteResponse extends ActionResponse implements IsAcknowle } @Override - public Iterator toXContentChunkedV7(ToXContent.Params outerParams) { - return Iterators.concat( - Iterators.single((builder, params) -> builder.startObject().field(ACKNOWLEDGED_KEY, isAcknowledged())), - emitState(outerParams) - ? ChunkedToXContentHelper.wrapWithObject("state", state.toXContentChunked(outerParams)) - : Collections.emptyIterator(), - Iterators.single((builder, params) -> { - if (params.paramAsBoolean("explain", false)) { - explanations.toXContent(builder, params); - } - - builder.endObject(); - return builder; - }) - ); + public Iterator toXContentChunkedV7(ToXContent.Params params) { + return ChunkedToXContent.builder(params).object(b -> { + b.field(ACKNOWLEDGED_KEY, isAcknowledged()); + if (emitState(params)) { + b.xContentObject("state", state); + } + if (params.paramAsBoolean("explain", false)) { + b.append(explanations); + } + }); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index cafda93dda9a..64df6e77326e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -759,10 +759,8 @@ public class ClusterState implements ChunkedToXContent, Diffable { // customs metrics.contains(Metric.CUSTOMS) - ? Iterators.flatMap( - customs.entrySet().iterator(), - cursor -> ChunkedToXContentHelper.wrapWithObject(cursor.getKey(), cursor.getValue().toXContentChunked(outerParams)) - ) + ? 
ChunkedToXContent.builder(outerParams) + .forEach(customs.entrySet().iterator(), (b, e) -> b.xContentObject(e.getKey(), e.getValue())) : Collections.emptyIterator() ); } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java index 0868a7fa303a..0102e58c7c1d 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilder.java @@ -38,6 +38,10 @@ public class ChunkedToXContentBuilder implements Iterator { builder.add(Objects.requireNonNull(content)); } + public ToXContent.Params params() { + return params; + } + private void startObject() { addChunk((b, p) -> b.startObject()); } @@ -259,6 +263,16 @@ public class ChunkedToXContentBuilder implements Iterator { return this; } + /** + * Creates an array with the contents set by appending together the contents of {@code items} + */ + public ChunkedToXContentBuilder array(Iterator items) { + startArray(); + items.forEachRemaining(this::append); + endArray(); + return this; + } + /** * Creates an array, with the contents set by appending together * the return values of {@code create} called on each item returned by {@code items} @@ -351,6 +365,12 @@ public class ChunkedToXContentBuilder implements Iterator { return this; } + public ChunkedToXContentBuilder field(String name, ChunkedToXContent value) { + addChunk((b, p) -> b.field(name)); + append(value); + return this; + } + public ChunkedToXContentBuilder field(String name, Object value) { addChunk((b, p) -> b.field(name, value)); return this; diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 940d4495ae90..fcbe0ac2b2ed 100644 --- 
a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -53,26 +53,10 @@ public enum ChunkedToXContentHelper { return Iterators.single(((builder, params) -> builder.field(name, value))); } - /** - * Creates an Iterator to serialize a named field where the value is represented by a {@link ChunkedToXContentObject}. - * Chunked equivalent for {@code XContentBuilder field(String name, ToXContent value)} - * @param name name of the field - * @param value value for this field - * @param params params to propagate for XContent serialization - * @return Iterator composing field name and value serialization - */ - public static Iterator field(String name, ChunkedToXContentObject value, ToXContent.Params params) { - return Iterators.concat(Iterators.single((builder, innerParam) -> builder.field(name)), value.toXContentChunked(params)); - } - public static Iterator array(String name, Iterator contents) { return Iterators.concat(ChunkedToXContentHelper.startArray(name), contents, ChunkedToXContentHelper.endArray()); } - public static Iterator wrapWithObject(String name, Iterator iterator) { - return Iterators.concat(startObject(name), iterator, endObject()); - } - /** * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link * Iterators#single}, but still useful because it avoids any type ambiguity. 
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java index a9a89a3fa761..adc1f65b8873 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java @@ -21,15 +21,8 @@ import java.util.Collections; import java.util.Iterator; import java.util.Map; import java.util.Objects; +import java.util.function.Function; -import static org.elasticsearch.common.collect.Iterators.concat; -import static org.elasticsearch.common.collect.Iterators.flatMap; -import static org.elasticsearch.common.collect.Iterators.single; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endArray; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.field; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startArray; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; import static org.elasticsearch.script.ScriptCacheStats.Fields.SCRIPT_CACHE_STATS; // This class is deprecated in favor of ScriptStats and ScriptContextStats @@ -76,35 +69,25 @@ public record ScriptCacheStats(Map context, ScriptStats gen @Override public Iterator toXContentChunked(ToXContent.Params outerParams) { - return concat( - startObject(SCRIPT_CACHE_STATS), - startObject(Fields.SUM), - general != null - ? 
concat( - field(ScriptStats.Fields.COMPILATIONS, general.getCompilations()), - field(ScriptStats.Fields.CACHE_EVICTIONS, general.getCacheEvictions()), - field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, general.getCompilationLimitTriggered()), - endObject(), - endObject() - ) - : concat(single((builder, params) -> { - var sum = sum(); - return builder.field(ScriptStats.Fields.COMPILATIONS, sum.getCompilations()) - .field(ScriptStats.Fields.CACHE_EVICTIONS, sum.getCacheEvictions()) - .field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, sum.getCompilationLimitTriggered()) - .endObject(); - }), startArray(Fields.CONTEXTS), flatMap(context.keySet().stream().sorted().iterator(), ctx -> { - var stats = context.get(ctx); - return concat( - startObject(), - field(Fields.CONTEXT, ctx), - field(ScriptStats.Fields.COMPILATIONS, stats.getCompilations()), - field(ScriptStats.Fields.CACHE_EVICTIONS, stats.getCacheEvictions()), - field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, stats.getCompilationLimitTriggered()), - endObject() - ); - }), endArray(), endObject()) - ); + Function statsFields = s -> (b, p) -> b.field(ScriptStats.Fields.COMPILATIONS, s.getCompilations()) + .field(ScriptStats.Fields.CACHE_EVICTIONS, s.getCacheEvictions()) + .field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, s.getCompilationLimitTriggered()); + + return ChunkedToXContent.builder(outerParams).object(SCRIPT_CACHE_STATS, sb -> { + if (general != null) { + sb.xContentObject(Fields.SUM, statsFields.apply(general)); + } else { + sb.xContentObject(Fields.SUM, statsFields.apply(sum())); + sb.array( + Fields.CONTEXTS, + context.entrySet().stream().sorted(Map.Entry.comparingByKey()).iterator(), + (eb, e) -> eb.object(ebo -> { + ebo.field(Fields.CONTEXT, e.getKey()); + ebo.append(statsFields.apply(e.getValue())); + }) + ); + } + }); } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index 8adf18cd82f5..67e5f30f023c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -313,11 +313,15 @@ public class ClusterRerouteResponseTests extends ESTestCase { fail(e); } - final var expectedChunks = Objects.equals(params.param("metric"), "none") - ? 2 - : 4 + ClusterStateTests.expectedChunkCount(params, response.getState()); + int[] expectedChunks = new int[] { 3 }; + if (Objects.equals(params.param("metric"), "none") == false) { + expectedChunks[0] += 2 + ClusterStateTests.expectedChunkCount(params, response.getState()); + } + if (params.paramAsBoolean("explain", false)) { + expectedChunks[0]++; + } - AbstractChunkedSerializingTestCase.assertChunkCount(response, params, ignored -> expectedChunks); + AbstractChunkedSerializingTestCase.assertChunkCount(response, params, o -> expectedChunks[0]); assertCriticalWarnings(criticalDeprecationWarnings); // check the v7 API too @@ -331,7 +335,7 @@ public class ClusterRerouteResponseTests extends ESTestCase { public boolean isFragment() { return response.isFragment(); } - }, params, ignored -> expectedChunks); + }, params, o -> expectedChunks[0]++); // the v7 API should not emit any deprecation warnings assertCriticalWarnings(); } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java new file mode 100644 index 000000000000..ff811f5d6d73 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentBuilderTests.java @@ -0,0 +1,88 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.common.xcontent; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ToXContent; + +import java.util.function.IntFunction; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + +public class ChunkedToXContentBuilderTests extends ESTestCase { + + public void testFieldWithInnerChunkedObject() { + + ToXContent innerXContent = (b, p) -> { + b.startObject(); + b.field("field1", 10); + b.field("field2", "aaa"); + b.endObject(); + return b; + }; + + ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); + + String expectedContent = Strings.toString(outerXContent); + + ChunkedToXContentObject innerChunkedContent = params -> new ChunkedToXContentBuilder(params).object( + o -> o.field("field1", 10).field("field2", "aaa") + ); + + ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) + .field("field4", innerChunkedContent); + + assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); + } + + public void testFieldWithInnerChunkedArray() { + + ToXContent innerXContent = (b, p) -> { + b.startArray(); + b.value(10); + b.value(20); + b.endArray(); + return b; + }; + + ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); + + String expectedContent = Strings.toString(outerXContent); + + IntFunction value = v -> (b, p) -> b.value(v); + + ChunkedToXContentObject innerChunkedContent = params -> new 
ChunkedToXContentBuilder(params).array( + IntStream.of(10, 20).mapToObj(value).iterator() + ); + + ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) + .field("field4", innerChunkedContent); + + assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); + } + + public void testFieldWithInnerChunkedField() { + + ToXContent innerXContent = (b, p) -> b.value(10); + ToXContent outerXContent = (b, p) -> b.field("field3", 10).field("field4", innerXContent); + + String expectedContent = Strings.toString(outerXContent); + + ChunkedToXContentObject innerChunkedContent = params -> Iterators.single((b, p) -> b.value(10)); + + ChunkedToXContent outerChunkedContent = params -> new ChunkedToXContentBuilder(params).field("field3", 10) + .field("field4", innerChunkedContent); + + assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelperTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelperTests.java deleted file mode 100644 index 353725fbd075..000000000000 --- a/server/src/test/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelperTests.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.common.xcontent; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xcontent.ToXContent; - -import java.util.Iterator; -import java.util.function.IntFunction; - -import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; -import static org.hamcrest.Matchers.equalTo; - -public class ChunkedToXContentHelperTests extends ESTestCase { - - public void testFieldWithInnerChunkedObject() { - - ToXContent innerXContent = (builder, p) -> { - builder.startObject(); - builder.field("field1", 10); - builder.field("field2", "aaa"); - builder.endObject(); - return builder; - }; - - ToXContent outerXContent = (builder, p) -> { - builder.field("field3", 10); - builder.field("field4", innerXContent); - return builder; - }; - - var expectedContent = Strings.toString(outerXContent); - - ChunkedToXContentObject innerChunkedContent = params -> Iterators.concat( - ChunkedToXContentHelper.startObject(), - ChunkedToXContentHelper.field("field1", 10), - ChunkedToXContentHelper.field("field2", "aaa"), - ChunkedToXContentHelper.endObject() - ); - - ChunkedToXContent outerChunkedContent = params -> Iterators.concat( - ChunkedToXContentHelper.field("field3", 10), - ChunkedToXContentHelper.field("field4", innerChunkedContent, EMPTY_PARAMS) - ); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } - - public void testFieldWithInnerChunkedArray() { - - ToXContent innerXContent = (builder, p) -> { - builder.startArray(); - builder.value(10); - builder.value(20); - builder.endArray(); - return builder; - }; - - ToXContent outerXContent = (builder, p) -> { - builder.field("field3", 10); - builder.field("field4", innerXContent); - return builder; - }; - - var expectedContent = Strings.toString(outerXContent); - - IntFunction> value = v -> Iterators.single(((builder, p) -> builder.value(v))); - - 
ChunkedToXContentObject innerChunkedContent = params -> Iterators.concat( - ChunkedToXContentHelper.startArray(), - value.apply(10), - value.apply(20), - ChunkedToXContentHelper.endArray() - ); - - ChunkedToXContent outerChunkedContent = params -> Iterators.concat( - ChunkedToXContentHelper.field("field3", 10), - ChunkedToXContentHelper.field("field4", innerChunkedContent, EMPTY_PARAMS) - ); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } - - public void testFieldWithInnerChunkedField() { - - ToXContent innerXContent = (builder, p) -> { - builder.value(10); - return builder; - }; - - ToXContent outerXContent = (builder, p) -> { - builder.field("field3", 10); - builder.field("field4", innerXContent); - return builder; - }; - - var expectedContent = Strings.toString(outerXContent); - - IntFunction> value = v -> Iterators.single(((builder, p) -> builder.value(v))); - - ChunkedToXContentObject innerChunkedContent = params -> Iterators.single((builder, p) -> builder.value(10)); - - ChunkedToXContent outerChunkedContent = params -> Iterators.concat( - ChunkedToXContentHelper.field("field3", 10), - ChunkedToXContentHelper.field("field4", innerChunkedContent, EMPTY_PARAMS) - ); - - assertThat(Strings.toString(outerChunkedContent), equalTo(expectedContent)); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 8e4da3f138a6..3232f3a9118d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ChunkedToXContent; import 
org.elasticsearch.common.xcontent.ChunkedToXContentBuilder; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; @@ -30,7 +29,6 @@ import org.elasticsearch.xpack.core.esql.action.EsqlResponse; import org.elasticsearch.xpack.esql.core.type.DataType; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; @@ -186,58 +184,35 @@ public class EsqlQueryResponse extends org.elasticsearch.xpack.core.esql.action. return executionInfo; } - private Iterator asyncPropertiesOrEmpty() { - if (isAsync) { - return ChunkedToXContentHelper.singleChunk((builder, params) -> { - if (asyncExecutionId != null) { - builder.field("id", asyncExecutionId); - } - builder.field("is_running", isRunning); - return builder; - }); - } else { - return Collections.emptyIterator(); - } - } - @Override public Iterator toXContentChunked(ToXContent.Params params) { - boolean dropNullColumns = params.paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); - boolean[] nullColumns = dropNullColumns ? nullColumns() : null; + return ChunkedToXContent.builder(params).object(b -> { + boolean dropNullColumns = b.params().paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); + boolean[] nullColumns = dropNullColumns ? nullColumns() : null; - Iterator tookTime; - if (executionInfo != null && executionInfo.overallTook() != null) { - tookTime = ChunkedToXContentHelper.singleChunk((builder, p) -> { - builder.field("took", executionInfo.overallTook().millis()); - return builder; - }); - } else { - tookTime = Collections.emptyIterator(); - } - - Iterator columnHeadings = dropNullColumns - ? 
Iterators.concat( - ResponseXContentUtils.allColumns(columns, "all_columns"), - ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns") - ) - : ResponseXContentUtils.allColumns(columns, "columns"); - Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns); - Iterator profileRender = profile == null - ? List.of().iterator() - : ChunkedToXContentHelper.field("profile", profile, params); - Iterator executionInfoRender = executionInfo == null || executionInfo.isCrossClusterSearch() == false - ? List.of().iterator() - : ChunkedToXContentHelper.field("_clusters", executionInfo, params); - return Iterators.concat( - ChunkedToXContentHelper.startObject(), - asyncPropertiesOrEmpty(), - tookTime, - columnHeadings, - ChunkedToXContentHelper.array("values", valuesIt), - executionInfoRender, - profileRender, - ChunkedToXContentHelper.endObject() - ); + if (isAsync) { + if (asyncExecutionId != null) { + b.field("id", asyncExecutionId); + } + b.field("is_running", isRunning); + } + if (executionInfo != null && executionInfo.overallTook() != null) { + b.field("took", executionInfo.overallTook().millis()); + } + if (dropNullColumns) { + b.append(ResponseXContentUtils.allColumns(columns, "all_columns")) + .append(ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns")); + } else { + b.append(ResponseXContentUtils.allColumns(columns, "columns")); + } + b.array("values", ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns)); + if (executionInfo != null && executionInfo.isCrossClusterSearch()) { + b.field("_clusters", executionInfo); + } + if (profile != null) { + b.field("profile", profile); + } + }); } private boolean[] nullColumns() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 7c0b6e6a2eaa..abf03d4fe06d 
100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -525,19 +525,19 @@ public class EsqlQueryResponseTests extends AbstractChunkedSerializingTestCase p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; - assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); // is_running + assertChunkCount(resp, r -> 7 + sizeClusterDetails + bodySize); // is_running } } public void testChunkResponseSizeRows() { int sizeClusterDetails = 14; try (EsqlQueryResponse resp = randomResponse(false, null)) { - int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); + int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum(); assertChunkCount(resp, r -> 5 + sizeClusterDetails + bodySize); } try (EsqlQueryResponse resp = randomResponseAsync(false, null, true)) { - int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); - assertChunkCount(resp, r -> 6 + sizeClusterDetails + bodySize); + int bodySize = resp.pages().stream().mapToInt(Page::getPositionCount).sum(); + assertChunkCount(resp, r -> 7 + sizeClusterDetails + bodySize); } } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java index 39bf9e78b3b0..810bd8f6e9ce 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/SingleNodeShutdownStatus.java @@ -12,11 +12,10 @@ import org.elasticsearch.cluster.metadata.ShutdownPluginsStatus; import org.elasticsearch.cluster.metadata.ShutdownShardMigrationStatus; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import 
org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -25,10 +24,6 @@ import java.io.IOException; import java.util.Iterator; import java.util.Objects; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.endObject; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.singleChunk; -import static org.elasticsearch.common.xcontent.ChunkedToXContentHelper.startObject; - public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObject { private final SingleNodeShutdownMetadata metadata; @@ -116,26 +111,27 @@ public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObj @Override public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat(startObject(), singleChunk((builder, p) -> { - builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId()); - builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType()); - builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason()); - if (metadata.getAllocationDelay() != null) { - builder.field( - SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(), - metadata.getAllocationDelay().getStringRep() + return ChunkedToXContent.builder(params).object(b -> { + b.append((builder, p) -> { + builder.field(SingleNodeShutdownMetadata.NODE_ID_FIELD.getPreferredName(), metadata.getNodeId()); + 
builder.field(SingleNodeShutdownMetadata.TYPE_FIELD.getPreferredName(), metadata.getType()); + builder.field(SingleNodeShutdownMetadata.REASON_FIELD.getPreferredName(), metadata.getReason()); + if (metadata.getAllocationDelay() != null) { + builder.field( + SingleNodeShutdownMetadata.ALLOCATION_DELAY_FIELD.getPreferredName(), + metadata.getAllocationDelay().getStringRep() + ); + } + builder.timeField( + SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(), + SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD, + metadata.getStartedAtMillis() ); - } - builder.timeField( - SingleNodeShutdownMetadata.STARTED_AT_MILLIS_FIELD.getPreferredName(), - SingleNodeShutdownMetadata.STARTED_AT_READABLE_FIELD, - metadata.getStartedAtMillis() - ); - builder.field(STATUS.getPreferredName(), overallStatus()); - return builder; - }), - ChunkedToXContentHelper.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus, params), - singleChunk((builder, p) -> { + builder.field(STATUS.getPreferredName(), overallStatus()); + return builder; + }); + b.field(SHARD_MIGRATION_FIELD.getPreferredName(), shardMigrationStatus); + b.append((builder, p) -> { builder.field(PERSISTENT_TASKS_FIELD.getPreferredName(), persistentTasksStatus); builder.field(PLUGINS_STATUS.getPreferredName(), pluginsStatus); if (metadata.getTargetNodeName() != null) { @@ -148,8 +144,7 @@ public class SingleNodeShutdownStatus implements Writeable, ChunkedToXContentObj ); } return builder; - }), - endObject() - ); + }); + }); } } From b5d6fa0130dd987cea0c215751553478dc38f632 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Tue, 8 Oct 2024 09:57:46 -0400 Subject: [PATCH 45/85] Add chunking settings configuration to CohereService, AmazonBedrockService, and AzureOpenAiService (#113897) * Add chunking settings configuration to CohereService, AmazonBedrockService, and AzureOpenAiService * Update docs/changelog/113897.yaml * Run spotlessApply * Updating CohereServiceMixedIT to account for clusters 
without chunking settings in index mapping --------- Co-authored-by: Elastic Machine --- docs/changelog/113897.yaml | 6 + .../qa/mixed/CohereServiceMixedIT.java | 29 +- .../amazonbedrock/AmazonBedrockService.java | 41 ++- .../AmazonBedrockEmbeddingsModel.java | 6 +- .../azureopenai/AzureOpenAiService.java | 49 ++- .../AzureOpenAiEmbeddingsModel.java | 6 +- .../services/cohere/CohereService.java | 48 ++- .../embeddings/CohereEmbeddingsModel.java | 6 +- .../AmazonBedrockServiceTests.java | 338 +++++++++++++++++- .../AmazonBedrockEmbeddingsModelTests.java | 61 ++++ .../azureopenai/AzureOpenAiServiceTests.java | 296 ++++++++++++++- .../AzureOpenAiEmbeddingsModelTests.java | 26 ++ .../services/cohere/CohereServiceTests.java | 314 +++++++++++++++- .../CohereEmbeddingsModelTests.java | 27 ++ 14 files changed, 1214 insertions(+), 39 deletions(-) create mode 100644 docs/changelog/113897.yaml diff --git a/docs/changelog/113897.yaml b/docs/changelog/113897.yaml new file mode 100644 index 000000000000..db0c53518613 --- /dev/null +++ b/docs/changelog/113897.yaml @@ -0,0 +1,6 @@ +pr: 113897 +summary: "Add chunking settings configuration to `CohereService,` `AmazonBedrockService,`\ + \ and `AzureOpenAiService`" +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java index 69274b46d75c..8cb37ad64535 100644 --- a/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java +++ b/x-pack/plugin/inference/qa/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/inference/qa/mixed/CohereServiceMixedIT.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; import static 
org.elasticsearch.xpack.inference.qa.mixed.MixedClusterSpecTestCase.bwcVersion; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; @@ -32,6 +33,7 @@ public class CohereServiceMixedIT extends BaseMixedTestCase { private static final String COHERE_EMBEDDINGS_ADDED = "8.13.0"; private static final String COHERE_RERANK_ADDED = "8.14.0"; + private static final String COHERE_EMBEDDINGS_CHUNKING_SETTINGS_ADDED = "8.16.0"; private static final String BYTE_ALIAS_FOR_INT8_ADDED = "8.14.0"; private static final String MINIMUM_SUPPORTED_VERSION = "8.15.0"; @@ -65,13 +67,28 @@ public class CohereServiceMixedIT extends BaseMixedTestCase { final String inferenceIdInt8 = "mixed-cluster-cohere-embeddings-int8"; final String inferenceIdFloat = "mixed-cluster-cohere-embeddings-float"; - // queue a response as PUT will call the service - cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); - put(inferenceIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + try { + // queue a response as PUT will call the service + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseByte())); + put(inferenceIdInt8, embeddingConfigInt8(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); - // float model - cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); - put(inferenceIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + // float model + cohereEmbeddingsServer.enqueue(new MockResponse().setResponseCode(200).setBody(embeddingResponseFloat())); + put(inferenceIdFloat, embeddingConfigFloat(getUrl(cohereEmbeddingsServer)), TaskType.TEXT_EMBEDDING); + } catch (Exception e) { + if 
(bwcVersion.before(Version.fromString(COHERE_EMBEDDINGS_CHUNKING_SETTINGS_ADDED))) { + // Chunking settings were added in 8.16.0. if the version is before that, an exception will be thrown if the index mapping + // was created based on a mapping from an old node + assertThat( + e.getMessage(), + containsString( + "One or more nodes in your cluster does not support chunking_settings. " + + "Please update all nodes in your cluster to the latest version to use chunking_settings." + ) + ); + return; + } + } var configs = (List>) get(TaskType.TEXT_EMBEDDING, inferenceIdInt8).get("endpoints"); assertEquals("cohere", configs.get(0).get("service")); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index bc0d10279ae4..c7c073660624 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -23,6 +24,8 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import 
org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.amazonbedrock.AmazonBedrockActionCreator; import org.elasticsearch.xpack.inference.external.amazonbedrock.AmazonBedrockRequestSender; @@ -99,8 +102,20 @@ public class AmazonBedrockService extends SenderService { var actionCreator = new AmazonBedrockActionCreator(amazonBedrockSender, this.getServiceComponents(), timeout); if (model instanceof AmazonBedrockModel baseAmazonBedrockModel) { var maxBatchSize = getEmbeddingsMaxBatchSize(baseAmazonBedrockModel.provider()); - var batchedRequests = new EmbeddingRequestChunker(inputs.getInputs(), maxBatchSize, EmbeddingRequestChunker.EmbeddingType.FLOAT) - .batchRequestsWithListeners(listener); + + List batchedRequests; + if (ChunkingSettingsFeatureFlag.isEnabled()) { + batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + maxBatchSize, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + baseAmazonBedrockModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); + } else { + batchedRequests = new EmbeddingRequestChunker(inputs.getInputs(), maxBatchSize, EmbeddingRequestChunker.EmbeddingType.FLOAT) + .batchRequestsWithListeners(listener); + } + for (var request : batchedRequests) { var action = baseAmazonBedrockModel.accept(actionCreator, taskSettings); action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, request.listener()); @@ -126,11 +141,19 @@ public class AmazonBedrockService extends SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, 
ModelConfigurations.CHUNKING_SETTINGS) + ); + } + AmazonBedrockModel model = createModel( modelId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -157,11 +180,17 @@ public class AmazonBedrockService extends SenderService { Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModel( modelId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(modelId, NAME), ConfigurationParseContext.PERSISTENT @@ -173,11 +202,17 @@ public class AmazonBedrockService extends SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModel( modelId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(modelId, NAME), ConfigurationParseContext.PERSISTENT @@ -189,6 +224,7 @@ public class AmazonBedrockService extends SenderService { TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context 
@@ -201,6 +237,7 @@ public class AmazonBedrockService extends SenderService { NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java index 0e3a954a0327..186d977d2067 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModel.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; @@ -42,6 +43,7 @@ public class AmazonBedrockEmbeddingsModel extends AmazonBedrockModel { String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secretSettings, ConfigurationParseContext context ) { @@ -51,6 +53,7 @@ public class AmazonBedrockEmbeddingsModel extends AmazonBedrockModel { service, AmazonBedrockEmbeddingsServiceSettings.fromMap(serviceSettings, context), new EmptyTaskSettings(), + chunkingSettings, AmazonBedrockSecretSettings.fromMap(secretSettings) ); } @@ -61,10 +64,11 @@ public class AmazonBedrockEmbeddingsModel extends AmazonBedrockModel { String service, AmazonBedrockEmbeddingsServiceSettings serviceSettings, TaskSettings taskSettings, + ChunkingSettings chunkingSettings, AmazonBedrockSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, 
serviceSettings, new EmptyTaskSettings()), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, new EmptyTaskSettings(), chunkingSettings), new ModelSecrets(secrets) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 96399bb954cd..07708ee07209 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -24,6 +25,8 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -70,11 +73,19 @@ public class AzureOpenAiService extends SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, 
ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + AzureOpenAiModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -95,6 +106,7 @@ public class AzureOpenAiService extends SenderService { TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage ) { @@ -103,6 +115,7 @@ public class AzureOpenAiService extends SenderService { taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -114,6 +127,7 @@ public class AzureOpenAiService extends SenderService { TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -126,6 +140,7 @@ public class AzureOpenAiService extends SenderService { NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -156,11 +171,17 @@ public class AzureOpenAiService extends SenderService { Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, 
serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -171,11 +192,17 @@ public class AzureOpenAiService extends SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -218,11 +245,23 @@ public class AzureOpenAiService extends SenderService { } AzureOpenAiModel azureOpenAiModel = (AzureOpenAiModel) model; var actionCreator = new AzureOpenAiActionCreator(getSender(), getServiceComponents()); - var batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT - ).batchRequestsWithListeners(listener); + + List batchedRequests; + if (ChunkingSettingsFeatureFlag.isEnabled()) { + batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT, + azureOpenAiModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); + } else { + batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.FLOAT + ).batchRequestsWithListeners(listener); + } + for (var request : batchedRequests) { var action = azureOpenAiModel.accept(actionCreator, taskSettings); action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, 
request.listener()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 377bb33f5861..7b83d5322a69 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai.embeddings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -38,6 +39,7 @@ public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secrets, ConfigurationParseContext context ) { @@ -47,6 +49,7 @@ public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { service, AzureOpenAiEmbeddingsServiceSettings.fromMap(serviceSettings, context), AzureOpenAiEmbeddingsTaskSettings.fromMap(taskSettings), + chunkingSettings, AzureOpenAiSecretSettings.fromMap(secrets) ); } @@ -58,10 +61,11 @@ public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { String service, AzureOpenAiEmbeddingsServiceSettings serviceSettings, AzureOpenAiEmbeddingsTaskSettings taskSettings, + ChunkingSettings chunkingSettings, @Nullable AzureOpenAiSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, 
chunkingSettings), new ModelSecrets(secrets), serviceSettings ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 3ba93dd8d1b6..1804d3bdf593 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -23,6 +24,8 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.cohere.CohereActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -76,11 +79,19 @@ public class CohereService extends SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + 
chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + CohereModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -101,6 +112,7 @@ public class CohereService extends SenderService { TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage ) { @@ -109,6 +121,7 @@ public class CohereService extends SenderService { taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -120,6 +133,7 @@ public class CohereService extends SenderService { TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -131,6 +145,7 @@ public class CohereService extends SenderService { NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -159,11 +174,17 @@ public class CohereService extends SenderService { Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelWithoutLoggingDeprecations( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -174,11 +195,17 @@ public class CohereService extends 
SenderService { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (ChunkingSettingsFeatureFlag.isEnabled() && TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelWithoutLoggingDeprecations( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -223,11 +250,22 @@ public class CohereService extends SenderService { CohereModel cohereModel = (CohereModel) model; var actionCreator = new CohereActionCreator(getSender(), getServiceComponents()); - var batchedRequests = new EmbeddingRequestChunker( - inputs.getInputs(), - EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()) - ).batchRequestsWithListeners(listener); + List batchedRequests; + if (ChunkingSettingsFeatureFlag.isEnabled()) { + batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()), + cohereModel.getConfigurations().getChunkingSettings() + ).batchRequestsWithListeners(listener); + } else { + batchedRequests = new EmbeddingRequestChunker( + inputs.getInputs(), + EMBEDDING_MAX_BATCH_SIZE, + EmbeddingRequestChunker.EmbeddingType.fromDenseVectorElementType(model.getServiceSettings().elementType()) + ).batchRequestsWithListeners(listener); + } + for (var request : batchedRequests) { var action = cohereModel.accept(actionCreator, taskSettings, inputType); action.execute(new DocumentsOnlyInput(request.batch().inputs()), timeout, request.listener()); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java index fea5226bf9c6..0f62ab51145f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -33,6 +34,7 @@ public class CohereEmbeddingsModel extends CohereModel { String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secrets, ConfigurationParseContext context ) { @@ -42,6 +44,7 @@ public class CohereEmbeddingsModel extends CohereModel { service, CohereEmbeddingsServiceSettings.fromMap(serviceSettings, context), CohereEmbeddingsTaskSettings.fromMap(taskSettings), + chunkingSettings, DefaultSecretSettings.fromMap(secrets) ); } @@ -53,10 +56,11 @@ public class CohereEmbeddingsModel extends CohereModel { String service, CohereEmbeddingsServiceSettings serviceSettings, CohereEmbeddingsTaskSettings taskSettings, + ChunkingSettings chunkingSettings, @Nullable DefaultSecretSettings secretSettings ) { super( - new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(modelId, taskType, service, serviceSettings, taskSettings, chunkingSettings), new ModelSecrets(secretSettings), secretSettings, serviceSettings.getCommonSettings() diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index 0b3cf533d818..9c746e7c2aed 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -25,6 +26,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -57,6 +59,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static 
org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -305,6 +309,93 @@ public class AmazonBedrockServiceTests extends ESTestCase { } } + public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), containsString("Model configuration contains settings")); + } + ); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + createRandomChunkingSettingsMap(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + modelVerificationListener + ); + } + } + + public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = 
(AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + createRandomChunkingSettingsMap(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + modelVerificationListener + ); + } + } + + public void testParseRequestConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, exception 
-> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, null, null, null), + Map.of(), + getAmazonBedrockSecretSettingsMap("access", "secret") + ), + modelVerificationListener + ); + } + } + public void testCreateModel_ForEmbeddingsTask_DimensionsIsNotAllowed() throws IOException { try (var service = createAmazonBedrockService()) { ActionListener modelVerificationListener = ActionListener.wrap( @@ -354,6 +445,100 @@ public class AmazonBedrockServiceTests extends ESTestCase { } } + public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertNull(model.getConfigurations().getChunkingSettings()); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + 
assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap( + settingsMap, + new HashMap(Map.of()), + createRandomChunkingSettingsMap(), + secretSettingsMap + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + + public + void + testParsePersistedConfigWithSecrets_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, 
null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + var secretSettings = (AmazonBedrockSecretSettings) model.getSecretSettings(); + assertThat(secretSettings.accessKey.toString(), is("access")); + assertThat(secretSettings.secretKey.toString(), is("secret")); + } + } + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "amazontitan"); @@ -538,6 +723,84 @@ public class AmazonBedrockServiceTests extends ESTestCase { } } + public + void + testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap( + settingsMap, + new HashMap(Map.of()), + 
createRandomChunkingSettingsMap(), + secretSettingsMap + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertNull(model.getConfigurations().getChunkingSettings()); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap( + settingsMap, + new HashMap(Map.of()), + createRandomChunkingSettingsMap(), + secretSettingsMap + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(model.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnAmazonBedrockEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws 
IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAmazonBedrockService()) { + var settingsMap = createEmbeddingsRequestSettingsMap("region", "model", "amazontitan", null, false, null, null); + var secretSettingsMap = getAmazonBedrockSecretSettingsMap("access", "secret"); + + var persistedConfig = getPersistedConfigMap(settingsMap, new HashMap(Map.of()), secretSettingsMap); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AmazonBedrockEmbeddingsModel.class)); + + var settings = (AmazonBedrockEmbeddingsServiceSettings) model.getServiceSettings(); + assertThat(settings.region(), is("region")); + assertThat(settings.modelId(), is("model")); + assertThat(settings.provider(), is(AmazonBedrockProvider.AMAZONTITAN)); + assertThat(model.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(model.getSecretSettings()); + } + } + public void testParsePersistedConfig_CreatesAnAmazonBedrockChatCompletionModel() throws IOException { try (var service = createAmazonBedrockService()) { var settingsMap = createChatCompletionRequestSettingsMap("region", "model", "anthropic"); @@ -1034,6 +1297,49 @@ public class AmazonBedrockServiceTests extends ESTestCase { } public void testChunkedInfer_CallsInfer_ConvertsFloatResponse_ForEmbeddings() throws IOException { + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + "access", + "secret" + ); + + testChunkedInfer(model); + } + + public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + 
AmazonBedrockProvider.AMAZONTITAN, + createRandomChunkingSettings(), + "access", + "secret" + ); + + testChunkedInfer(model); + } + + public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = AmazonBedrockEmbeddingsModelTests.createModel( + "id", + "region", + "model", + AmazonBedrockProvider.AMAZONTITAN, + null, + "access", + "secret" + ); + + testChunkedInfer(model); + } + + private void testChunkedInfer(AmazonBedrockEmbeddingsModel model) throws IOException { var sender = mock(Sender.class); var factory = mock(HttpRequestSender.Factory.class); when(factory.createSender()).thenReturn(sender); @@ -1058,14 +1364,6 @@ public class AmazonBedrockServiceTests extends ESTestCase { requestSender.enqueue(mockResults2); } - var model = AmazonBedrockEmbeddingsModelTests.createModel( - "id", - "region", - "model", - AmazonBedrockProvider.AMAZONTITAN, - "access", - "secret" - ); PlainActionFuture> listener = new PlainActionFuture<>(); service.chunkedInfer( model, @@ -1106,6 +1404,18 @@ public class AmazonBedrockServiceTests extends ESTestCase { return new AmazonBedrockService(mock(HttpRequestSender.Factory.class), amazonBedrockFactory, createWithEmptySettings(threadPool)); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, @@ -1120,6 +1430,18 @@ public class AmazonBedrockServiceTests extends ESTestCase { ); } + private Utils.PersistedConfig getPersistedConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) 
{ + var persistedConfigMap = getPersistedConfigMap(serviceSettings, taskSettings, secretSettings); + persistedConfigMap.config().put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return persistedConfigMap; + } + private Utils.PersistedConfig getPersistedConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java index 711e3cbb5a51..72dc696ddd81 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/embeddings/AmazonBedrockEmbeddingsModelTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.inference.services.amazonbedrock.embeddings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; @@ -47,6 +48,65 @@ public class AmazonBedrockEmbeddingsModelTests extends ESTestCase { return createModel(inferenceId, region, model, provider, null, false, null, null, new RateLimitSettings(240), accessKey, secretKey); } + public static AmazonBedrockEmbeddingsModel createModel( + String inferenceId, + String region, + String model, + AmazonBedrockProvider provider, + ChunkingSettings chunkingSettings, + String accessKey, + String secretKey + ) { + return createModel( + inferenceId, + region, + model, + provider, + null, + false, + null, + null, + new RateLimitSettings(240), 
+ chunkingSettings, + accessKey, + secretKey + ); + } + + public static AmazonBedrockEmbeddingsModel createModel( + String inferenceId, + String region, + String model, + AmazonBedrockProvider provider, + @Nullable Integer dimensions, + boolean dimensionsSetByUser, + @Nullable Integer maxTokens, + @Nullable SimilarityMeasure similarity, + RateLimitSettings rateLimitSettings, + ChunkingSettings chunkingSettings, + String accessKey, + String secretKey + ) { + return new AmazonBedrockEmbeddingsModel( + inferenceId, + TaskType.TEXT_EMBEDDING, + "amazonbedrock", + new AmazonBedrockEmbeddingsServiceSettings( + region, + model, + provider, + dimensions, + dimensionsSetByUser, + maxTokens, + similarity, + rateLimitSettings + ), + new EmptyTaskSettings(), + chunkingSettings, + new AmazonBedrockSecretSettings(new SecureString(accessKey), new SecureString(secretKey)) + ); + } + public static AmazonBedrockEmbeddingsModel createModel( String inferenceId, String region, @@ -75,6 +135,7 @@ public class AmazonBedrockEmbeddingsModelTests extends ESTestCase { rateLimitSettings ), new EmptyTaskSettings(), + null, new AmazonBedrockSecretSettings(new SecureString(accessKey), new SecureString(secretKey)) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 098e41b72ea8..0fc8f3f2b0eb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import 
org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -29,6 +30,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; @@ -56,6 +58,8 @@ import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; @@ -122,6 +126,88 @@ public class AzureOpenAiServiceTests extends ESTestCase { } } + public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAzureOpenAiService()) { + var config = getRequestConfigMap( + 
getRequestAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap(), + getAzureOpenAiSecretSettingsMap("secret", null) + ); + + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), containsString("Model configuration contains settings")); + } + ); + + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, modelVerificationListener); + } + } + + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + getRequestAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + 
getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap(), + getAzureOpenAiSecretSettingsMap("secret", null) + ), + modelVerificationListener + ); + } + } + + public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + getRequestAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + getAzureOpenAiRequestTaskSettingsMap("user"), + getAzureOpenAiSecretSettingsMap("secret", null) + ), + modelVerificationListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createAzureOpenAiService()) { ActionListener modelVerificationListener = ActionListener.wrap( @@ -298,6 +384,103 @@ public class AzureOpenAiServiceTests extends ESTestCase { } } + public + void + 
testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", 100, 512), + getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap(), + getAzureOpenAiSecretSettingsMap("secret", null) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getServiceSettings().dimensions(), is(100)); + assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + getPersistentAzureOpenAiServiceSettingsMap("resource_name", 
"deployment_id", "api_version", 100, 512), + getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap(), + getAzureOpenAiSecretSettingsMap("secret", null) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getServiceSettings().dimensions(), is(100)); + assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", 100, 512), + getAzureOpenAiRequestTaskSettingsMap("user"), + getAzureOpenAiSecretSettingsMap("secret", null) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = 
(AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getServiceSettings().dimensions(), is(100)); + assertThat(embeddingsModel.getServiceSettings().maxInputTokens(), is(512)); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + public void testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( @@ -494,6 +677,77 @@ public class AzureOpenAiServiceTests extends ESTestCase { } } + public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap() + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), 
is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnAzureOpenAiEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + getAzureOpenAiRequestTaskSettingsMap("user"), + createRandomChunkingSettingsMap() + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createAzureOpenAiService()) { + var persistedConfig = getPersistedConfigMap( + 
getPersistentAzureOpenAiServiceSettingsMap("resource_name", "deployment_id", "api_version", null, null), + getAzureOpenAiRequestTaskSettingsMap("user") + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(AzureOpenAiEmbeddingsModel.class)); + + var embeddingsModel = (AzureOpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().resourceName(), is("resource_name")); + assertThat(embeddingsModel.getServiceSettings().deploymentId(), is("deployment_id")); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is("api_version")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(embeddingsModel.getSecretSettings()); + } + } + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { try (var service = createAzureOpenAiService()) { var persistedConfig = getPersistedConfigMap( @@ -1064,6 +1318,35 @@ public class AzureOpenAiServiceTests extends ESTestCase { } public void testChunkedInfer_CallsInfer_ConvertsFloatResponse() throws IOException, URISyntaxException { + var model = AzureOpenAiEmbeddingsModelTests.createModel("resource", "deployment", "apiversion", "user", "apikey", null, "id"); + + testChunkedInfer(model); + } + + public void testChunkedInfer_ChunkingSettingsSetAndFeatureFlagEnabled() throws IOException, URISyntaxException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = AzureOpenAiEmbeddingsModelTests.createModel( + "resource", + "deployment", + "apiversion", + "user", + createRandomChunkingSettings(), + "apikey", + null, + "id" + ); + + testChunkedInfer(model); + } + + public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException, URISyntaxException 
{ + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = AzureOpenAiEmbeddingsModelTests.createModel("resource", "deployment", "apiversion", "user", null, "apikey", null, "id"); + + testChunkedInfer(model); + } + + private void testChunkedInfer(AzureOpenAiEmbeddingsModel model) throws IOException, URISyntaxException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new AzureOpenAiService(senderFactory, createWithEmptySettings(threadPool))) { @@ -1098,7 +1381,6 @@ public class AzureOpenAiServiceTests extends ESTestCase { """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = AzureOpenAiEmbeddingsModelTests.createModel("resource", "deployment", "apiversion", "user", "apikey", null, "id"); model.setUri(new URI(getUrl(webServer))); PlainActionFuture> listener = new PlainActionFuture<>(); service.chunkedInfer( @@ -1145,6 +1427,18 @@ public class AzureOpenAiServiceTests extends ESTestCase { return new AzureOpenAiService(mock(HttpRequestSender.Factory.class), createWithEmptySettings(threadPool)); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index 1747155623a9..2f6760cb36e9 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai.embeddings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.ESTestCase; @@ -100,6 +101,7 @@ public class AzureOpenAiEmbeddingsModelTests extends ESTestCase { String deploymentId, String apiVersion, String user, + ChunkingSettings chunkingSettings, @Nullable String apiKey, @Nullable String entraId, String inferenceEntityId @@ -112,6 +114,29 @@ public class AzureOpenAiEmbeddingsModelTests extends ESTestCase { "service", new AzureOpenAiEmbeddingsServiceSettings(resourceName, deploymentId, apiVersion, null, false, null, null, null), new AzureOpenAiEmbeddingsTaskSettings(user), + chunkingSettings, + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + public static AzureOpenAiEmbeddingsModel createModel( + String resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? 
new SecureString(entraId.toCharArray()) : null; + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + TaskType.TEXT_EMBEDDING, + "service", + new AzureOpenAiEmbeddingsServiceSettings(resourceName, deploymentId, apiVersion, null, false, null, null, null), + new AzureOpenAiEmbeddingsTaskSettings(user), + null, new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) ); } @@ -147,6 +172,7 @@ public class AzureOpenAiEmbeddingsModelTests extends ESTestCase { null ), new AzureOpenAiEmbeddingsTaskSettings(user), + null, new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 420a635963a2..758c38166778 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.Model; @@ -30,6 +31,7 @@ import org.elasticsearch.test.http.MockResponse; import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.ChunkingSettingsFeatureFlag; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import 
org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.InferenceChunkedTextEmbeddingFloatResults; @@ -62,6 +64,8 @@ import static org.elasticsearch.xpack.inference.Utils.getInvalidModel; import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -130,6 +134,95 @@ public class CohereServiceTests extends ESTestCase { } } + public void testParseRequestConfig_ThrowsElasticsearchStatusExceptionWhenChunkingSettingsProvidedAndFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createCohereService()) { + var serviceSettings = CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null); + + var config = getRequestConfigMap( + serviceSettings, + getTaskSettingsMap(null, null), + createRandomChunkingSettingsMap(), + getSecretSettingsMap("secret") + ); + + var failureListener = ActionListener.wrap((model) -> fail("Model parsing should have failed"), e -> { + MatcherAssert.assertThat(e, instanceOf(ElasticsearchStatusException.class)); + MatcherAssert.assertThat(e.getMessage(), containsString("Model configuration contains settings")); + }); + service.parseRequestConfig("id", 
TaskType.TEXT_EMBEDDING, config, failureListener); + } + } + + public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); + MatcherAssert.assertThat( + embeddingsModel.getTaskSettings(), + is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START)) + ); + MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, e -> fail("Model parsing should have succeeded " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT), + getTaskSettingsMap(InputType.INGEST, CohereTruncation.START), + createRandomChunkingSettingsMap(), + getSecretSettingsMap("secret") + ), + modelListener + ); + + } + } + + public void testParseRequestConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 
'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); + MatcherAssert.assertThat( + embeddingsModel.getTaskSettings(), + is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START)) + ); + MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, e -> fail("Model parsing should have succeeded " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", CohereEmbeddingType.FLOAT), + getTaskSettingsMap(InputType.INGEST, CohereTruncation.START), + getSecretSettingsMap("secret") + ), + modelListener + ); + + } + } + public void testParseRequestConfig_OptionalTaskSettings() throws IOException { try (var service = createCohereService()) { @@ -305,6 +398,92 @@ public class CohereServiceTests extends ESTestCase { } } + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWithoutChunkingSettingsWhenFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", 
ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, null), + createRandomChunkingSettingsMap(), + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, null), + createRandomChunkingSettingsMap(), + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + 
MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, null), + getSecretSettingsMap("secret") + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); + MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + } + } + public void 
testParsePersistedConfigWithSecrets_ThrowsErrorTryingToParseInvalidModel() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( @@ -507,6 +686,74 @@ public class CohereServiceTests extends ESTestCase { } } + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutChunkingSettingsWhenChunkingSettingsFeatureFlagDisabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is disabled", ChunkingSettingsFeatureFlag.isEnabled() == false); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, CohereTruncation.NONE), + createRandomChunkingSettingsMap() + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); + assertNull(embeddingsModel.getConfigurations().getChunkingSettings()); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + 
getTaskSettingsMap(null, CohereTruncation.NONE), + createRandomChunkingSettingsMap() + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); + MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(embeddingsModel.getSecretSettings()); + } + } + + public void testParsePersistedConfig_CreatesACohereEmbeddingsModelWhenChunkingSettingsNotProvidedAndFeatureFlagEnabled() + throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + try (var service = createCohereService()) { + var persistedConfig = getPersistedConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", "model", null), + getTaskSettingsMap(null, CohereTruncation.NONE) + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().uri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().modelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); + 
MatcherAssert.assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + assertNull(embeddingsModel.getSecretSettings()); + } + } + public void testParsePersistedConfig_ThrowsErrorTryingToParseInvalidModel() throws IOException { try (var service = createCohereService()) { var persistedConfig = getPersistedConfigMap( @@ -1164,6 +1411,52 @@ public class CohereServiceTests extends ESTestCase { } public void testChunkedInfer_BatchesCalls() throws IOException { + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(null, null), + 1024, + 1024, + "model", + null + ); + + testChunkedInfer(model); + } + + public void testChunkedInfer_BatchesCallsChunkingSettingsSetAndFeatureFlagEnabled() throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(null, null), + createRandomChunkingSettings(), + 1024, + 1024, + "model", + null + ); + + testChunkedInfer(model); + } + + public void testChunkedInfer_ChunkingSettingsNotSetAndFeatureFlagEnabled() throws IOException { + assumeTrue("Only if 'inference_chunking_settings' feature flag is enabled", ChunkingSettingsFeatureFlag.isEnabled()); + var model = CohereEmbeddingsModelTests.createModel( + getUrl(webServer), + "secret", + new CohereEmbeddingsTaskSettings(null, null), + null, + 1024, + 1024, + "model", + null + ); + + testChunkedInfer(model); + } + + private void testChunkedInfer(CohereEmbeddingsModel model) throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { @@ -1200,15 +1493,6 @@ public class CohereServiceTests extends ESTestCase { """; webServer.enqueue(new 
MockResponse().setResponseCode(200).setBody(responseJson)); - var model = CohereEmbeddingsModelTests.createModel( - getUrl(webServer), - "secret", - new CohereEmbeddingsTaskSettings(null, null), - 1024, - 1024, - "model", - null - ); PlainActionFuture> listener = new PlainActionFuture<>(); // 2 inputs service.chunkedInfer( @@ -1399,6 +1683,18 @@ public class CohereServiceTests extends ESTestCase { .hasErrorContaining("how dare you"); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java index 093283c0b37d..670e63a85cf9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModelTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.inference.TaskType; @@ -208,6 +209,7 @@ public class CohereEmbeddingsModelTests extends ESTestCase { String url, String apiKey, CohereEmbeddingsTaskSettings taskSettings, + ChunkingSettings 
chunkingSettings, @Nullable Integer tokenLimit, @Nullable Integer dimensions, @Nullable String model, @@ -222,6 +224,30 @@ public class CohereEmbeddingsModelTests extends ESTestCase { Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) ), taskSettings, + chunkingSettings, + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + + public static CohereEmbeddingsModel createModel( + String url, + String apiKey, + CohereEmbeddingsTaskSettings taskSettings, + @Nullable Integer tokenLimit, + @Nullable Integer dimensions, + @Nullable String model, + @Nullable CohereEmbeddingType embeddingType + ) { + return new CohereEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(url, SimilarityMeasure.DOT_PRODUCT, dimensions, tokenLimit, model, null), + Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) + ), + taskSettings, + null, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } @@ -245,6 +271,7 @@ public class CohereEmbeddingsModelTests extends ESTestCase { Objects.requireNonNullElse(embeddingType, CohereEmbeddingType.FLOAT) ), taskSettings, + null, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } From a68fd70b15377c21421198d8dea9e26a7fb02861 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 9 Oct 2024 01:06:40 +1100 Subject: [PATCH 46/85] Mute org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT test {p0=aggregations/stats_metric_fail_formatting/fail formatting} #114320 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index d6d5d47af3a7..2687f7c2a1f5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -376,6 +376,9 @@ tests: - class: org.elasticsearch.index.SearchSlowLogTests method: testTwoLoggersDifferentLevel issue: 
https://github.com/elastic/elasticsearch/issues/114301 +- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT + method: test {p0=aggregations/stats_metric_fail_formatting/fail formatting} + issue: https://github.com/elastic/elasticsearch/issues/114320 # Examples: # From 0a0a4cb9ad24740040c1fce34367506ece843279 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 01:39:19 +1100 Subject: [PATCH 47/85] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to 90888b1 (#114284) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Update | Change | |---|---|---| | docker.elastic.co/wolfi/chainguard-base | digest | `c16d3ad` -> `90888b1` | --- ### Configuration 📅 **Schedule**: Branch creation - At any time (no schedule defined), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Renovate Bot](https://togithub.com/renovatebot/renovate). --- build-tools-internal/build.gradle | 9 +++++++++ .../gradle/internal/DockerBase.java | 6 ++++-- renovate.json | 18 +++++++++++++++++- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 949091816a72..38d3c0cd326f 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -384,6 +384,15 @@ tasks.named("jar") { exclude("classpath.index") } +spotless { + java { + // IDEs can sometimes run annotation processors that leave files in + // here, causing Spotless to complain. 
Even though this path ought not + // to exist, exclude it anyway in order to avoid spurious failures. + toggleOffOn() + } +} + def resolveMainWrapperVersion() { new URL("https://raw.githubusercontent.com/elastic/elasticsearch/main/build-tools-internal/src/main/resources/minimumGradleVersion").text.trim() } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 793ff6049e10..95f279bfa516 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -29,11 +29,13 @@ public enum DockerBase { CLOUD_ESS(null, "-cloud-ess", "apt-get"), // Chainguard based wolfi image with latest jdk - WOLFI( - "docker.elastic.co/wolfi/chainguard-base:latest@sha256:c16d3ad6cebf387e8dd2ad769f54320c4819fbbaa21e729fad087c7ae223b4d0", + // This is usually updated via renovatebot + // spotless:off + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:90888b190da54062f67f3fef1372eb0ae7d81ea55f5a1f56d748b13e4853d984", "-wolfi", "apk" ), + // spotless:on // Based on WOLFI above, with more extras. We don't set a base image because // we programmatically extend from the Wolfi image. 
diff --git a/renovate.json b/renovate.json index 7dde3a9440ed..0a1d588e6332 100644 --- a/renovate.json +++ b/renovate.json @@ -1,7 +1,23 @@ { "$schema": "https://docs.renovatebot.com/renovate-schema.json", "extends": [ - "github>elastic/renovate-config:only-chainguard" + "github>elastic/renovate-config:only-chainguard", + ":disableDependencyDashboard" + ], + "labels": [">non-issue", ":Delivery/Packaging", "Team:Delivery"], + "baseBranches": ["main", "8.x"], + "packageRules": [ + { + "groupName": "wolfi (versioned)", + "groupSlug": "wolfi-versioned", + "description": "Override the `groupSlug` to create a non-special-character branch name", + "matchDatasources": [ + "docker" + ], + "matchPackagePatterns": [ + "^docker.elastic.co/wolfi/chainguard-base$" + ] + } ], "customManagers": [ { From 0cc05448c3f1ae5e1e13ec7b4d110393982f7db4 Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Tue, 8 Oct 2024 10:49:59 -0400 Subject: [PATCH 48/85] Use ELSER By Default For Semantic Text (#113563) Co-authored-by: David Kyle --- docs/changelog/113563.yaml | 5 + .../xpack/inference/InferenceFeatures.java | 18 ++- .../inference/mapper/SemanticTextField.java | 1 + .../mapper/SemanticTextFieldMapper.java | 31 +++- .../mapper/SemanticTextFieldMapperTests.java | 143 +++++++++++++++--- .../inference/30_semantic_text_inference.yml | 31 ++++ .../test/inference/40_semantic_text_query.yml | 35 +++++ 7 files changed, 229 insertions(+), 35 deletions(-) create mode 100644 docs/changelog/113563.yaml diff --git a/docs/changelog/113563.yaml b/docs/changelog/113563.yaml new file mode 100644 index 000000000000..48484ead99d7 --- /dev/null +++ b/docs/changelog/113563.yaml @@ -0,0 +1,5 @@ +pr: 113563 +summary: Use ELSER By Default For Semantic Text +area: Mapping +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 
a3f210505463..87b7be717d31 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; +import java.util.HashSet; import java.util.Set; /** @@ -23,13 +24,16 @@ public class InferenceFeatures implements FeatureSpecification { @Override public Set getFeatures() { - return Set.of( - TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED, - RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED, - SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID, - SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS, - TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED - ); + var features = new HashSet(); + features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_RETRIEVER_SUPPORTED); + features.add(RandomRankRetrieverBuilder.RANDOM_RERANKER_RETRIEVER_SUPPORTED); + features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_SEARCH_INFERENCE_ID); + features.add(SemanticQueryBuilder.SEMANTIC_TEXT_INNER_HITS); + features.add(TextSimilarityRankRetrieverBuilder.TEXT_SIMILARITY_RERANKER_COMPOSITION_SUPPORTED); + if (DefaultElserFeatureFlag.isEnabled()) { + features.add(SemanticTextFieldMapper.SEMANTIC_TEXT_DEFAULT_ELSER_2); + } + return Set.copyOf(features); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java index 0c807c116660..e60e95b58770 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextField.java @@ -58,6 +58,7 @@ public record SemanticTextField(String fieldName, List originalValues, I static final String TEXT_FIELD = "text"; static final String INFERENCE_FIELD = "inference"; static final String INFERENCE_ID_FIELD = "inference_id"; + static final String SEARCH_INFERENCE_ID_FIELD = "search_inference_id"; static final String CHUNKS_FIELD = "chunks"; static final String CHUNKED_EMBEDDINGS_FIELD = "embeddings"; static final String CHUNKED_TEXT_FIELD = "text"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 0483296cd2c6..a5702b38ea3f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -54,6 +54,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xpack.core.ml.inference.results.MlTextEmbeddingResults; import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import java.io.IOException; import java.util.ArrayList; @@ -71,18 +72,23 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKED import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_ID_FIELD; +import static 
org.elasticsearch.xpack.inference.mapper.SemanticTextField.MODEL_SETTINGS_FIELD; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.SEARCH_INFERENCE_ID_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.TEXT_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getChunksFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getEmbeddingsFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getOriginalTextFieldName; +import static org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService.DEFAULT_ELSER_ID; /** * A {@link FieldMapper} for semantic text fields. */ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFieldMapper { public static final NodeFeature SEMANTIC_TEXT_SEARCH_INFERENCE_ID = new NodeFeature("semantic_text.search_inference_id"); + public static final NodeFeature SEMANTIC_TEXT_DEFAULT_ELSER_2 = new NodeFeature("semantic_text.default_elser_2"); public static final String CONTENT_TYPE = "semantic_text"; + public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; private final IndexSettings indexSettings; @@ -96,25 +102,37 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie private final IndexSettings indexSettings; private final Parameter inferenceId = Parameter.stringParam( - "inference_id", + INFERENCE_ID_FIELD, false, mapper -> ((SemanticTextFieldType) mapper.fieldType()).inferenceId, - null + DefaultElserFeatureFlag.isEnabled() ? DEFAULT_ELSER_2_INFERENCE_ID : null ).addValidator(v -> { if (Strings.isEmpty(v)) { - throw new IllegalArgumentException("field [inference_id] must be specified"); + // If the default ELSER feature flag is enabled, the only way we get here is if the user explicitly sets the param to an + // empty value. 
However, if the feature flag is disabled, we can get here if the user didn't set the param. + // Adjust the error message appropriately. + String message = DefaultElserFeatureFlag.isEnabled() + ? "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" + : "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must be specified"; + throw new IllegalArgumentException(message); } }); private final Parameter searchInferenceId = Parameter.stringParam( - "search_inference_id", + SEARCH_INFERENCE_ID_FIELD, true, mapper -> ((SemanticTextFieldType) mapper.fieldType()).searchInferenceId, null - ).acceptsNull(); + ).acceptsNull().addValidator(v -> { + if (v != null && Strings.isEmpty(v)) { + throw new IllegalArgumentException( + "[" + SEARCH_INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" + ); + } + }); private final Parameter modelSettings = new Parameter<>( - "model_settings", + MODEL_SETTINGS_FIELD, true, () -> null, (n, c, o) -> SemanticTextField.parseModelSettingsFromMap(o), @@ -204,6 +222,7 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie } var childContext = context.createChildContext(leafName(), ObjectMapper.Dynamic.FALSE); final ObjectMapper inferenceField = inferenceFieldBuilder.apply(childContext); + return new SemanticTextFieldMapper( leafName(), new SemanticTextFieldType( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 1697b33fedd9..7c8d1bbf9fb4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java 
@@ -23,6 +23,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -58,6 +59,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.inference.DefaultElserFeatureFlag; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.model.TestModel; import org.junit.AssumptionViolatedException; @@ -77,8 +79,10 @@ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.CHUNKS_ import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.INFERENCE_ID_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.MODEL_SETTINGS_FIELD; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.SEARCH_INFERENCE_ID_FIELD; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getChunksFieldName; import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.getEmbeddingsFieldName; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper.DEFAULT_ELSER_2_INFERENCE_ID; import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -92,7 +96,10 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { @Override protected void 
minimalMapping(XContentBuilder b) throws IOException { - b.field("type", "semantic_text").field("inference_id", "test_model"); + b.field("type", "semantic_text"); + if (DefaultElserFeatureFlag.isEnabled() == false) { + b.field("inference_id", "test_model"); + } } @Override @@ -155,8 +162,16 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { } public void testDefaults() throws Exception { - DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); - assertEquals(Strings.toString(fieldMapping(this::minimalMapping)), mapper.mappingSource().toString()); + final String fieldName = "field"; + final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping); + + MapperService mapperService = createMapperService(fieldMapping); + DocumentMapper mapper = mapperService.documentMapper(); + assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); + assertSemanticTextField(mapperService, fieldName, false); + if (DefaultElserFeatureFlag.isEnabled()) { + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); + } ParsedDocument doc1 = mapper.parse(source(this::writeField)); List fields = doc1.rootDoc().getFields("field"); @@ -172,12 +187,80 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { assertTrue(fieldType.fieldHasValue(fieldInfos)); } - public void testInferenceIdNotPresent() { - Exception e = expectThrows( - MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text"))) - ); - assertThat(e.getMessage(), containsString("field [inference_id] must be specified")); + public void testSetInferenceEndpoints() throws IOException { + final String fieldName = "field"; + final String inferenceId = "foo"; + final String searchInferenceId = "bar"; + + CheckedBiConsumer assertSerialization = (expectedMapping, mapperService) -> { + DocumentMapper mapper = mapperService.documentMapper(); + 
assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString()); + }; + + { + final XContentBuilder fieldMapping = fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, inferenceId)); + final MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); + assertSerialization.accept(fieldMapping, mapperService); + } + { + if (DefaultElserFeatureFlag.isEnabled()) { + final XContentBuilder fieldMapping = fieldMapping( + b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + final MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); + assertSerialization.accept(fieldMapping, mapperService); + } + } + { + final XContentBuilder fieldMapping = fieldMapping( + b -> b.field("type", "semantic_text") + .field(INFERENCE_ID_FIELD, inferenceId) + .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); + MapperService mapperService = createMapperService(fieldMapping); + assertSemanticTextField(mapperService, fieldName, false); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId); + assertSerialization.accept(fieldMapping, mapperService); + } + } + + public void testInvalidInferenceEndpoints() { + { + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, (String) null))) + ); + assertThat( + e.getMessage(), + containsString("[inference_id] on mapper [field] of type [semantic_text] must not have a [null] value") + ); + } + { + final String expectedMessage = DefaultElserFeatureFlag.isEnabled() + ? 
"[inference_id] on mapper [field] of type [semantic_text] must not be empty" + : "[inference_id] on mapper [field] of type [semantic_text] must be specified"; + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(INFERENCE_ID_FIELD, ""))) + ); + assertThat(e.getMessage(), containsString(expectedMessage)); + } + { + if (DefaultElserFeatureFlag.isEnabled()) { + Exception e = expectThrows( + MapperParsingException.class, + () -> createMapperService(fieldMapping(b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, ""))) + ); + assertThat( + e.getMessage(), + containsString("[search_inference_id] on mapper [field] of type [semantic_text] must not be empty") + ); + } + } } public void testCannotBeUsedInMultiFields() { @@ -221,7 +304,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); - assertSearchInferenceId(mapperService, fieldName, inferenceId); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); } { @@ -232,7 +315,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); - assertSearchInferenceId(mapperService, fieldName, searchInferenceId); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId); } } @@ -331,19 +414,19 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { String fieldName = randomFieldName(depth); MapperService mapperService = createMapperService(buildMapping.apply(fieldName, null)); assertSemanticTextField(mapperService, fieldName, false); - assertSearchInferenceId(mapperService, fieldName, inferenceId); + assertInferenceEndpoints(mapperService, fieldName, 
inferenceId, inferenceId); merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1)); assertSemanticTextField(mapperService, fieldName, false); - assertSearchInferenceId(mapperService, fieldName, searchInferenceId1); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId1); merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2)); assertSemanticTextField(mapperService, fieldName, false); - assertSearchInferenceId(mapperService, fieldName, searchInferenceId2); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId2); merge(mapperService, buildMapping.apply(fieldName, null)); assertSemanticTextField(mapperService, fieldName, false); - assertSearchInferenceId(mapperService, fieldName, inferenceId); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); mapperService = mapperServiceForFieldWithModelSettings( fieldName, @@ -351,19 +434,19 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { new SemanticTextField.ModelSettings(TaskType.SPARSE_EMBEDDING, null, null, null) ); assertSemanticTextField(mapperService, fieldName, true); - assertSearchInferenceId(mapperService, fieldName, inferenceId); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); merge(mapperService, buildMapping.apply(fieldName, searchInferenceId1)); assertSemanticTextField(mapperService, fieldName, true); - assertSearchInferenceId(mapperService, fieldName, searchInferenceId1); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId1); merge(mapperService, buildMapping.apply(fieldName, searchInferenceId2)); assertSemanticTextField(mapperService, fieldName, true); - assertSearchInferenceId(mapperService, fieldName, searchInferenceId2); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, searchInferenceId2); merge(mapperService, buildMapping.apply(fieldName, null)); assertSemanticTextField(mapperService, 
fieldName, true); - assertSearchInferenceId(mapperService, fieldName, inferenceId); + assertInferenceEndpoints(mapperService, fieldName, inferenceId, inferenceId); } } @@ -409,11 +492,17 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { } } - private static void assertSearchInferenceId(MapperService mapperService, String fieldName, String expectedSearchInferenceId) { + private static void assertInferenceEndpoints( + MapperService mapperService, + String fieldName, + String expectedInferenceId, + String expectedSearchInferenceId + ) { var fieldType = mapperService.fieldType(fieldName); assertNotNull(fieldType); assertThat(fieldType, instanceOf(SemanticTextFieldMapper.SemanticTextFieldType.class)); SemanticTextFieldMapper.SemanticTextFieldType semanticTextFieldType = (SemanticTextFieldMapper.SemanticTextFieldType) fieldType; + assertEquals(expectedInferenceId, semanticTextFieldType.getInferenceId()); assertEquals(expectedSearchInferenceId, semanticTextFieldType.getSearchInferenceId()); } @@ -433,9 +522,19 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { MapperService mapperService = createMapperService(mapping); assertSemanticTextField(mapperService, fieldName1, false); - assertSearchInferenceId(mapperService, fieldName1, setSearchInferenceId ? searchInferenceId : model1.getInferenceEntityId()); + assertInferenceEndpoints( + mapperService, + fieldName1, + model1.getInferenceEntityId(), + setSearchInferenceId ? searchInferenceId : model1.getInferenceEntityId() + ); assertSemanticTextField(mapperService, fieldName2, false); - assertSearchInferenceId(mapperService, fieldName2, setSearchInferenceId ? searchInferenceId : model2.getInferenceEntityId()); + assertInferenceEndpoints( + mapperService, + fieldName2, + model2.getInferenceEntityId(), + setSearchInferenceId ? 
searchInferenceId : model2.getInferenceEntityId() + ); DocumentMapper documentMapper = mapperService.documentMapper(); ParsedDocument doc = documentMapper.parse( diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml index f58a5c33fd85..1795d754d2a9 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -547,3 +547,34 @@ setup: - match: { _source.dense_field.text: "another updated inference test" } - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } - exists: _source.dense_field.inference.chunks.0.embeddings + +--- +"Calculates embeddings using the default ELSER 2 endpoint": + - requires: + cluster_features: "semantic_text.default_elser_2" + reason: semantic_text default ELSER 2 inference ID introduced in 8.16.0 + + - do: + indices.create: + index: test-elser-2-default-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + + - do: + index: + index: test-elser-2-default-index + id: doc_1 + body: + sparse_field: "inference test" + + - do: + get: + index: test-elser-2-default-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index 2070b3752791..10858acc0aff 100644 --- 
a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -839,3 +839,38 @@ setup: - match: { error.type: "resource_not_found_exception" } - match: { error.reason: "Inference endpoint not found [invalid-inference-id]" } + +--- +"Query a field that uses the default ELSER 2 endpoint": + - requires: + cluster_features: "semantic_text.default_elser_2" + reason: semantic_text default ELSER 2 inference ID introduced in 8.16.0 + + - do: + indices.create: + index: test-elser-2-default-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + + - do: + index: + index: test-elser-2-default-index + id: doc_1 + body: + sparse_field: "inference test" + refresh: true + + - do: + search: + index: test-elser-2-default-index + body: + query: + semantic: + field: "sparse_field" + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } From 72b7d5ecf8e4195957cee32257085dc20f14ce1e Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 9 Oct 2024 02:05:32 +1100 Subject: [PATCH 49/85] Mute org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests testInfer_StreamRequest_ErrorResponse #114327 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 2687f7c2a1f5..bc573959fb2f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -379,6 +379,9 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=aggregations/stats_metric_fail_formatting/fail formatting} issue: https://github.com/elastic/elasticsearch/issues/114320 +- class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests + method: testInfer_StreamRequest_ErrorResponse + issue: 
https://github.com/elastic/elasticsearch/issues/114327 # Examples: # From ae38b9091ff2689331aac60986cfe155ef45e5b0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 9 Oct 2024 02:30:01 +1100 Subject: [PATCH 50/85] Mute org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT test {yaml=rrf/700_rrf_retriever_search_api_compatibility/rrf retriever with top-level collapse} #114331 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index bc573959fb2f..302bbbf80c41 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -382,6 +382,9 @@ tests: - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest_ErrorResponse issue: https://github.com/elastic/elasticsearch/issues/114327 +- class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT + method: test {yaml=rrf/700_rrf_retriever_search_api_compatibility/rrf retriever with top-level collapse} + issue: https://github.com/elastic/elasticsearch/issues/114331 # Examples: # From 3a83fcdef969fdd376720ff0b57953623ca0d8b4 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 8 Oct 2024 16:37:04 +0100 Subject: [PATCH 51/85] [ML] Remove scale to zero feature flag (#114323) --- .../test/cluster/FeatureFlag.java | 1 - .../AdaptiveAllocationsScaler.java | 3 +-- .../AdaptiveAllocationsScalerService.java | 3 +-- .../ScaleToZeroFeatureFlag.java | 20 ------------------- .../AdaptiveAllocationsScalerTests.java | 4 ---- 5 files changed, 2 insertions(+), 29 deletions(-) delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 5adf01a2a0e7..aa72d3248812 100644 --- 
a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -19,7 +19,6 @@ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), CHUNKING_SETTINGS_ENABLED("es.inference_chunking_settings_feature_flag_enabled=true", Version.fromString("8.16.0"), null), - INFERENCE_SCALE_TO_ZERO("es.inference_scale_to_zero_feature_flag_enabled=true", Version.fromString("8.16.0"), null), INFERENCE_DEFAULT_ELSER("es.inference_default_elser_feature_flag_enabled=true", Version.fromString("8.16.0"), null); public final String systemProperty; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java index 05e7202b8efe..58259b87c6b0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScaler.java @@ -170,8 +170,7 @@ public class AdaptiveAllocationsScaler { if (maxNumberOfAllocations != null) { numberOfAllocations = Math.min(numberOfAllocations, maxNumberOfAllocations); } - if (ScaleToZeroFeatureFlag.isEnabled() - && (minNumberOfAllocations == null || minNumberOfAllocations == 0) + if ((minNumberOfAllocations == null || minNumberOfAllocations == 0) && timeWithoutRequestsSeconds > SCALE_TO_ZERO_AFTER_NO_REQUESTS_TIME_SECONDS) { logger.debug("[{}] adaptive allocations scaler: scaling down to zero, because of no requests.", deploymentId); numberOfAllocations = 0; diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java index 775279a6b255..193fa9e7e07f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerService.java @@ -421,8 +421,7 @@ public class AdaptiveAllocationsScalerService implements ClusterStateListener { } public boolean maybeStartAllocation(TrainedModelAssignment assignment) { - if (ScaleToZeroFeatureFlag.isEnabled() - && assignment.getAdaptiveAllocationsSettings() != null + if (assignment.getAdaptiveAllocationsSettings() != null && assignment.getAdaptiveAllocationsSettings().getEnabled() == Boolean.TRUE) { lastScaleUpTimesMillis.put(assignment.getDeploymentId(), System.currentTimeMillis()); updateNumberOfAllocations(assignment.getDeploymentId(), 1); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java deleted file mode 100644 index 072b8c5593c9..000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/ScaleToZeroFeatureFlag.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.adaptiveallocations; - -import org.elasticsearch.common.util.FeatureFlag; - -public class ScaleToZeroFeatureFlag { - private ScaleToZeroFeatureFlag() {} - - private static final FeatureFlag FEATURE_FLAG = new FeatureFlag("inference_scale_to_zero"); - - public static boolean isEnabled() { - return FEATURE_FLAG.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java index 7d98aaf67a7f..1887ebe8050e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/adaptiveallocations/AdaptiveAllocationsScalerTests.java @@ -148,8 +148,6 @@ public class AdaptiveAllocationsScalerTests extends ESTestCase { } public void testAutoscaling_scaleDownToZeroAllocations() { - assumeTrue("Should only run if adaptive allocations feature flag is enabled", ScaleToZeroFeatureFlag.isEnabled()); - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); // 1 hour with 1 request per 1 seconds, so don't scale. 
for (int i = 0; i < 3600; i++) { @@ -180,8 +178,6 @@ public class AdaptiveAllocationsScalerTests extends ESTestCase { } public void testAutoscaling_dontScaleDownToZeroAllocationsWhenMinAllocationsIsSet() { - assumeTrue("Should only run if adaptive allocations feature flag is enabled", ScaleToZeroFeatureFlag.isEnabled()); - AdaptiveAllocationsScaler adaptiveAllocationsScaler = new AdaptiveAllocationsScaler("test-deployment", 1); adaptiveAllocationsScaler.setMinMaxNumberOfAllocations(1, null); From 965265a1a47507548640da3a7c179a6b78126ef3 Mon Sep 17 00:00:00 2001 From: Oleksandr Kolomiiets Date: Tue, 8 Oct 2024 09:14:03 -0700 Subject: [PATCH 52/85] Don't generate invalid combination of subobjects parameter in logsdb tests (#114265) --- muted-tests.yml | 3 --- .../DefaultMappingParametersHandler.java | 19 ++++++++----------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index 302bbbf80c41..696d7a4496e6 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -365,9 +365,6 @@ tests: - class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114232 -- class: org.elasticsearch.logsdb.datageneration.DataGeneratorTests - method: testDataGeneratorProducesValidMappingAndDocument - issue: https://github.com/elastic/elasticsearch/issues/114188 - class: org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests issue: https://github.com/elastic/elasticsearch/issues/114266 - class: org.elasticsearch.index.SearchSlowLogTests diff --git a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java index 81bd80f46452..4b33f3fefcf1 100644 --- a/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java 
+++ b/test/framework/src/main/java/org/elasticsearch/logsdb/datageneration/datasource/DefaultMappingParametersHandler.java @@ -102,18 +102,12 @@ public class DefaultMappingParametersHandler implements DataSourceHandler { // TODO enable subobjects: auto // It is disabled because it currently does not have auto flattening and that results in asserts being triggered when using // copy_to. - if (ESTestCase.randomBoolean()) { - parameters.put( - "subobjects", - ESTestCase.randomValueOtherThan( - ObjectMapper.Subobjects.AUTO, - () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) - ).toString() - ); - } + var subobjects = ESTestCase.randomValueOtherThan( + ObjectMapper.Subobjects.AUTO, + () -> ESTestCase.randomFrom(ObjectMapper.Subobjects.values()) + ); - if (request.parentSubobjects() == ObjectMapper.Subobjects.DISABLED - || parameters.getOrDefault("subobjects", "true").equals("false")) { + if (request.parentSubobjects() == ObjectMapper.Subobjects.DISABLED || subobjects == ObjectMapper.Subobjects.DISABLED) { // "enabled: false" is not compatible with subobjects: false // changing "dynamic" from parent context is not compatible with subobjects: false // changing subobjects value is not compatible with subobjects: false @@ -124,6 +118,9 @@ public class DefaultMappingParametersHandler implements DataSourceHandler { return parameters; } + if (ESTestCase.randomBoolean()) { + parameters.put("subobjects", subobjects.toString()); + } if (ESTestCase.randomBoolean()) { parameters.put("dynamic", ESTestCase.randomFrom("true", "false", "strict", "runtime")); } From f633148d102a314bb8e3074ccf797f9444c94fd1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Oct 2024 12:17:56 -0400 Subject: [PATCH 53/85] Docs: ESQL doesn't preserve `null`s in a list (#114335) The doc values don't preserve `null`s in a list so ESQL doesn't either. 
Closes #114324 --- .../esql/multivalued-fields.asciidoc | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/docs/reference/esql/multivalued-fields.asciidoc b/docs/reference/esql/multivalued-fields.asciidoc index 2dfda3060d3e..562ea2a2e6b4 100644 --- a/docs/reference/esql/multivalued-fields.asciidoc +++ b/docs/reference/esql/multivalued-fields.asciidoc @@ -177,6 +177,37 @@ POST /_query ---- // TESTRESPONSE[s/"took": 28/"took": "$body.took"/] +[discrete] +[[esql-multivalued-nulls]] +==== `null` in a list + +`null` values in a list are not preserved at the storage layer: + +[source,console,id=esql-multivalued-fields-multivalued-nulls] +---- +POST /mv/_doc?refresh +{ "a": [2, null, 1] } + +POST /_query +{ + "query": "FROM mv | LIMIT 1" +} +---- + +[source,console-result] +---- +{ + "took": 28, + "columns": [ + { "name": "a", "type": "long"}, + ], + "values": [ + [[1, 2]], + ] +} +---- +// TESTRESPONSE[s/"took": 28/"took": "$body.took"/] + [discrete] [[esql-multivalued-fields-functions]] ==== Functions From d3fa42cda096a4dadc9ac15a1b6dce9a235834b7 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Oct 2024 12:37:55 -0400 Subject: [PATCH 54/85] ESQL: Entirely remove META FUNCTIONS (#113967) This removes the undocumented `META FUNCTIONS` command that emits descriptions for all functions. This shouldn't be used because we never told anyone about it. I'd have preferred if we'd have explicitly documented it as no public or if we'd have left it snapshot-only. But sometimes you make a mistake. I'm hopeful no one is relying on it. It was never reliable and not public..... 
--- .../src/main/resources/changelog-schema.json | 1 + docs/changelog/113967.yaml | 13 + docs/reference/rest-api/usage.asciidoc | 3 +- x-pack/plugin/build.gradle | 1 + .../esql/qa/mixed/MixedClusterEsqlSpecIT.java | 4 - .../xpack/esql/ccq/MultiClusterSpecIT.java | 4 - .../src/main/resources/meta.csv-spec | 552 ----- .../xpack/esql/action/EsqlActionIT.java | 24 - .../esql/src/main/antlr/EsqlBaseLexer.g4 | 21 - .../esql/src/main/antlr/EsqlBaseLexer.tokens | 329 ++- .../esql/src/main/antlr/EsqlBaseParser.g4 | 5 - .../esql/src/main/antlr/EsqlBaseParser.tokens | 329 ++- .../xpack/esql/action/EsqlCapabilities.java | 6 +- .../function/EsqlFunctionRegistry.java | 26 - .../xpack/esql/parser/EsqlBaseLexer.interp | 19 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1979 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 13 +- .../xpack/esql/parser/EsqlBaseParser.java | 1507 ++++++------- .../parser/EsqlBaseParserBaseListener.java | 12 - .../parser/EsqlBaseParserBaseVisitor.java | 7 - .../esql/parser/EsqlBaseParserListener.java | 12 - .../esql/parser/EsqlBaseParserVisitor.java | 7 - .../xpack/esql/parser/LogicalPlanBuilder.java | 6 - .../esql/plan/logical/meta/MetaFunctions.java | 143 -- .../xpack/esql/planner/Mapper.java | 4 - .../xpack/esql/stats/FeatureMetric.java | 9 +- .../esql/parser/StatementParserTests.java | 1 - .../esql/stats/VerifierMetricsTests.java | 40 - .../rest-api-spec/test/esql/60_usage.yml | 12 +- 29 files changed, 2038 insertions(+), 3051 deletions(-) create mode 100644 docs/changelog/113967.yaml delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index a435305a8e3e..451701d74d69 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ 
b/build-tools-internal/src/main/resources/changelog-schema.json @@ -284,6 +284,7 @@ "Cluster and node setting", "Command line tool", "CRUD", + "ES|QL", "Index setting", "Ingest", "JVM option", diff --git a/docs/changelog/113967.yaml b/docs/changelog/113967.yaml new file mode 100644 index 000000000000..58b72eba49de --- /dev/null +++ b/docs/changelog/113967.yaml @@ -0,0 +1,13 @@ +pr: 113967 +summary: "ESQL: Entirely remove META FUNCTIONS" +area: ES|QL +type: breaking +issues: [] +breaking: + title: "ESQL: Entirely remove META FUNCTIONS" + area: ES|QL + details: | + Removes an undocumented syntax from ESQL: META FUNCTION. This was never + reliable or really useful. Consult the documentation instead. + impact: "Removes an undocumented syntax from ESQL: META FUNCTION" + notable: false diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 4dcf0d328e4f..957f57ffc910 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -239,8 +239,7 @@ GET /_xpack/usage "keep" : 0, "enrich" : 0, "from" : 0, - "row" : 0, - "meta" : 0 + "row" : 0 }, "queries" : { "rest" : { diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 158cccb1b6ea..3e5aaea43a9b 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -84,5 +84,6 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("security/10_forbidden/Test bulk response with invalid credentials", "warning does not exist for compatibility") task.skipTest("wildcard/30_ignore_above_synthetic_source/wildcard field type ignore_above", "Temporary until backported") task.skipTest("inference/inference_crud/Test get all", "Assertions on number of inference models break due to default configs") + task.skipTest("esql/60_usage/Basic ESQL usage output (telemetry)", "The telemetry output changed. We dropped a column. 
That's safe.") }) diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 08b4794b740d..d0d6d5fa49c4 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -72,10 +72,6 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); assumeTrue("Test " + testName + " is skipped on " + bwcVersion, isEnabled(testName, instructions, bwcVersion)); - assumeFalse( - "Skip META tests on mixed version clusters because we change it too quickly", - testCase.requiredCapabilities.contains("meta") - ); if (mode == ASYNC) { assumeTrue("Async is not supported on " + bwcVersion, supportsAsync()); } diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index 8d54dc63598f..3e799730f726 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -112,10 +112,6 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { ); assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains("inlinestats")); assumeFalse("INLINESTATS not yet supported in CCS", 
testCase.requiredCapabilities.contains("inlinestats_v2")); - assumeFalse( - "Skip META tests on mixed version clusters because we change it too quickly", - testCase.requiredCapabilities.contains("meta") - ); } private TestFeatureService remoteFeaturesService() throws IOException { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec deleted file mode 100644 index 6e8d5fba67ce..000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ /dev/null @@ -1,552 +0,0 @@ -metaFunctionsSynopsis -required_capability: date_nanos_type -required_capability: meta - -meta functions | keep synopsis; - -synopsis:keyword -"double|integer|long|unsigned_long abs(number:double|integer|long|unsigned_long)" -"double acos(number:double|integer|long|unsigned_long)" -"double asin(number:double|integer|long|unsigned_long)" -"double atan(number:double|integer|long|unsigned_long)" -"double atan2(y_coordinate:double|integer|long|unsigned_long, x_coordinate:double|integer|long|unsigned_long)" -"double avg(number:double|integer|long)" -"double|date bin(field:integer|long|double|date, buckets:integer|long|double|date_period|time_duration, ?from:integer|long|double|date|keyword|text, ?to:integer|long|double|date|keyword|text)" -"double|date bucket(field:integer|long|double|date, buckets:integer|long|double|date_period|time_duration, ?from:integer|long|double|date|keyword|text, ?to:integer|long|double|date|keyword|text)" -"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version case(condition:boolean, trueValue...:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"double cbrt(number:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long ceil(number:double|integer|long|unsigned_long)" -"boolean 
cidr_match(ip:ip, blockX...:keyword|text)" -"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version coalesce(first:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version, ?rest...:boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version)" -"keyword concat(string1:keyword|text, string2...:keyword|text)" -"double cos(angle:double|integer|long|unsigned_long)" -"double cosh(number:double|integer|long|unsigned_long)" -"long count(?field:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"long count_distinct(field:boolean|date|double|integer|ip|keyword|long|text|version, ?precision:integer|long|unsigned_long)" -"integer date_diff(unit:keyword|text, startTimestamp:date, endTimestamp:date)" -"long date_extract(datePart:keyword|text, date:date)" -"keyword date_format(?dateFormat:keyword|text, date:date)" -"date date_parse(?datePattern:keyword|text, dateString:keyword|text)" -"date date_trunc(interval:date_period|time_duration, date:date)" -double e() -"boolean ends_with(str:keyword|text, suffix:keyword|text)" -"double exp(number:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" -"keyword from_base64(string:keyword|text)" -"boolean|date|double|integer|ip|keyword|long|text|version greatest(first:boolean|date|double|integer|ip|keyword|long|text|version, ?rest...:boolean|date|double|integer|ip|keyword|long|text|version)" -"ip ip_prefix(ip:ip, prefixLengthV4:integer, prefixLengthV6:integer)" -"boolean|date|double|integer|ip|keyword|long|text|version least(first:boolean|date|double|integer|ip|keyword|long|text|version, ?rest...:boolean|date|double|integer|ip|keyword|long|text|version)" -"keyword left(string:keyword|text, length:integer)" -"integer length(string:keyword|text)" -"integer locate(string:keyword|text, 
substring:keyword|text, ?start:integer)" -"double log(?base:integer|unsigned_long|long|double, number:integer|unsigned_long|long|double)" -"double log10(number:double|integer|long|unsigned_long)" -"keyword|text ltrim(string:keyword|text)" -"boolean|double|integer|long|date|ip|keyword|text|long|version max(field:boolean|double|integer|long|date|ip|keyword|text|long|version)" -"double median(number:double|integer|long)" -"double median_absolute_deviation(number:double|integer|long)" -"boolean|double|integer|long|date|ip|keyword|text|long|version min(field:boolean|double|integer|long|date|ip|keyword|text|long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_append(field1:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, field2:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version)" -"double mv_avg(number:double|integer|long|unsigned_long)" -"keyword mv_concat(string:text|keyword, delim:text|keyword)" -"integer mv_count(field:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_dedupe(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version)" -"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(field:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version 
mv_last(field:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(field:boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long|unsigned_long mv_median(number:double|integer|long|unsigned_long)" -"double|integer|long|unsigned_long mv_median_absolute_deviation(number:double|integer|long|unsigned_long)" -"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(field:boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version)" -"double|integer|long mv_percentile(number:double|integer|long, percentile:double|integer|long)" -"double mv_pseries_weighted_sum(number:double, p:double)" -"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version mv_slice(field:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version, start:integer, ?end:integer)" -"boolean|date|double|integer|ip|keyword|long|text|version mv_sort(field:boolean|date|double|integer|ip|keyword|long|text|version, ?order:keyword)" -"double|integer|long|unsigned_long mv_sum(number:double|integer|long|unsigned_long)" -"keyword mv_zip(string1:keyword|text, string2:keyword|text, ?delim:keyword|text)" -date now() -"double percentile(number:double|integer|long, percentile:double|integer|long)" -double pi() -"double pow(base:double|integer|long|unsigned_long, exponent:double|integer|long|unsigned_long)" -"keyword repeat(string:keyword|text, number:integer)" -"keyword replace(string:keyword|text, regex:keyword|text, newString:keyword|text)" -"keyword|text reverse(str:keyword|text)" -"keyword right(string:keyword|text, length:integer)" -"double|integer|long|unsigned_long round(number:double|integer|long|unsigned_long, ?decimals:integer)" 
-"keyword|text rtrim(string:keyword|text)" -"double signum(number:double|integer|long|unsigned_long)" -"double sin(angle:double|integer|long|unsigned_long)" -"double sinh(number:double|integer|long|unsigned_long)" -"keyword space(number:integer)" -"keyword split(string:keyword|text, delim:keyword|text)" -"double sqrt(number:double|integer|long|unsigned_long)" -"geo_point|cartesian_point st_centroid_agg(field:geo_point|cartesian_point)" -"boolean st_contains(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" -"boolean st_disjoint(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" -"double st_distance(geomA:geo_point|cartesian_point, geomB:geo_point|cartesian_point)" -"boolean st_intersects(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" -"boolean st_within(geomA:geo_point|cartesian_point|geo_shape|cartesian_shape, geomB:geo_point|cartesian_point|geo_shape|cartesian_shape)" -"double st_x(point:geo_point|cartesian_point)" -"double st_y(point:geo_point|cartesian_point)" -"boolean starts_with(str:keyword|text, prefix:keyword|text)" -"keyword substring(string:keyword|text, start:integer, ?length:integer)" -"long|double sum(number:double|integer|long)" -"double tan(angle:double|integer|long|unsigned_long)" -"double tanh(number:double|integer|long|unsigned_long)" -double tau() -"keyword to_base64(string:keyword|text)" -"boolean to_bool(field:boolean|keyword|text|double|long|unsigned_long|integer)" -"boolean to_boolean(field:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(field:cartesian_point|keyword|text)" -"cartesian_shape to_cartesianshape(field:cartesian_point|cartesian_shape|keyword|text)" -"date_nanos to_date_nanos(field:date|date_nanos|keyword|text|double|long|unsigned_long)" -"date_nanos 
to_datenanos(field:date|date_nanos|keyword|text|double|long|unsigned_long)" -"date_period to_dateperiod(field:date_period|keyword|text)" -"date to_datetime(field:date|date_nanos|keyword|text|double|long|unsigned_long|integer)" -"double to_dbl(field:boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long)" -"double to_degrees(number:double|integer|long|unsigned_long)" -"double to_double(field:boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long)" -"date to_dt(field:date|date_nanos|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(field:geo_point|keyword|text)" -"geo_shape to_geoshape(field:geo_point|geo_shape|keyword|text)" -"integer to_int(field:boolean|date|keyword|text|double|long|unsigned_long|integer|counter_integer)" -"integer to_integer(field:boolean|date|keyword|text|double|long|unsigned_long|integer|counter_integer)" -"ip to_ip(field:ip|keyword|text)" -"long to_long(field:boolean|date|date_nanos|keyword|text|double|long|unsigned_long|integer|counter_integer|counter_long)" -"keyword|text to_lower(str:keyword|text)" -"double to_radians(number:double|integer|long|unsigned_long)" -"keyword to_str(field:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"keyword to_string(field:boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" -"time_duration to_timeduration(field:time_duration|keyword|text)" -"unsigned_long to_ul(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_ulong(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"unsigned_long to_unsigned_long(field:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"keyword|text to_upper(str:keyword|text)" -"version to_ver(field:keyword|text|version)" -"version 
to_version(field:keyword|text|version)" -"boolean|double|integer|long|date|ip|keyword|text top(field:boolean|double|integer|long|date|ip|keyword|text, limit:integer, order:keyword)" -"keyword|text trim(string:keyword|text)" -"boolean|date|double|integer|ip|keyword|long|text|version values(field:boolean|date|double|integer|ip|keyword|long|text|version)" -"double weighted_avg(number:double|integer|long, weight:double|integer|long)" -; - -metaFunctionsArgs -required_capability: meta -required_capability: date_nanos_type - - META functions -| EVAL name = SUBSTRING(name, 0, 14) -| KEEP name, argNames, argTypes, argDescriptions; - - name:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword -abs |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -acos |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. -asin |number |"double|integer|long|unsigned_long" |Number between -1 and 1. If `null`, the function returns `null`. -atan |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |[y coordinate. If `null`\, the function returns `null`., x coordinate. If `null`\, the function returns `null`.] -avg |number |"double|integer|long" |[""] -bin |[field, buckets, from, to] |["integer|long|double|date", "integer|long|double|date_period|time_duration", "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |[Numeric or date expression from which to derive buckets., Target number of buckets\, or desired bucket size if `from` and `to` parameters are omitted., Start of the range. Can be a number\, a date or a date expressed as a string., End of the range. Can be a number\, a date or a date expressed as a string.] 
-bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|long|double|date_period|time_duration", "integer|long|double|date|keyword|text", "integer|long|double|date|keyword|text"] |[Numeric or date expression from which to derive buckets., Target number of buckets\, or desired bucket size if `from` and `to` parameters are omitted., Start of the range. Can be a number\, a date or a date expressed as a string., End of the range. Can be a number\, a date or a date expressed as a string.] -case |[condition, trueValue] |[boolean, "boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"] |[A condition., The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches.] -cbrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." -ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |Expression to evaluate. -concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] -cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. -cosh |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -count |field |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" |Expression that outputs values to be counted. If omitted, equivalent to `COUNT(*)` (the number of rows). 
-count_distinct|[field, precision] |["boolean|date|double|integer|ip|keyword|long|text|version", "integer|long|unsigned_long"] |[Column or literal for which to count the number of distinct values., Precision threshold. Refer to <>. The maximum supported value is 40000. Thresholds above this number will have the same effect as a threshold of 40000. The default value is 3000.] -date_diff |[unit, startTimestamp, endTimestamp]|["keyword|text", date, date] |[Time difference unit, A string representing a start timestamp, A string representing an end timestamp] -date_extract |[datePart, date] |["keyword|text", date] |[Part of the date to extract. Can be: `aligned_day_of_week_in_month`\, `aligned_day_of_week_in_year`\, `aligned_week_of_month`\, `aligned_week_of_year`\, `ampm_of_day`\, `clock_hour_of_ampm`\, `clock_hour_of_day`\, `day_of_month`\, `day_of_week`\, `day_of_year`\, `epoch_day`\, `era`\, `hour_of_ampm`\, `hour_of_day`\, `instant_seconds`\, `micro_of_day`\, `micro_of_second`\, `milli_of_day`\, `milli_of_second`\, `minute_of_day`\, `minute_of_hour`\, `month_of_year`\, `nano_of_day`\, `nano_of_second`\, `offset_seconds`\, `proleptic_month`\, `second_of_day`\, `second_of_minute`\, `year`\, or `year_of_era`. Refer to https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] for a description of these values. If `null`\, the function returns `null`., Date expression. If `null`\, the function returns `null`.] -date_format |[dateFormat, date] |["keyword|text", date] |[Date format (optional). If no format is specified\, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`\, the function returns `null`., Date expression. If `null`\, the function returns `null`.] -date_parse |[datePattern, dateString] |["keyword|text", "keyword|text"] |[The date format. Refer to the https://docs.oracle.com/en/java/javase/14/docs/api/java.base/java/time/format/DateTimeFormatter.html[`DateTimeFormatter` documentation] for the syntax. 
If `null`\, the function returns `null`., Date expression as a string. If `null` or an empty string\, the function returns `null`.] -date_trunc |[interval, date] |["date_period|time_duration", date] |[Interval; expressed using the timespan literal syntax., Date expression] -e |null |null |null -ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] -exp |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"boolean|date|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. -ip_prefix |[ip, prefixLengthV4, prefixLengthV6]|[ip, integer, integer] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., Prefix length for IPv4 addresses., Prefix length for IPv6 addresses.] -least |first |"boolean|date|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. -left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] -length |string |"keyword|text" |String expression. If `null`, the function returns `null`. -locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] -log |[base, number] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"] |["Base of logarithm. If `null`\, the function returns `null`. If not provided\, this function returns the natural logarithm (base e) of a value.", "Numeric expression. If `null`\, the function returns `null`."] -log10 |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. 
-ltrim |string |"keyword|text" |String expression. If `null`, the function returns `null`. -max |field |"boolean|double|integer|long|date|ip|keyword|text|long|version" |[""] -median |number |"double|integer|long" |[""] -median_absolut|number |"double|integer|long" |[""] -min |field |"boolean|double|integer|long|date|ip|keyword|text|long|version" |[""] -mv_append |[field1, field2] |["boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version", "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version"] | ["", ""] -mv_avg |number |"double|integer|long|unsigned_long" |Multivalue expression. -mv_concat |[string, delim] |["text|keyword", "text|keyword"] |[Multivalue expression., Delimiter.] -mv_count |field |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. -mv_dedupe |field |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |Multivalue expression. -mv_first |field |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. -mv_last |field |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. -mv_max |field |"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. -mv_median |number |"double|integer|long|unsigned_long" |Multivalue expression. -mv_median_abso|number |"double|integer|long|unsigned_long" |Multivalue expression. -mv_min |field |"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version" |Multivalue expression. 
-mv_percentile |[number, percentile] |["double|integer|long", "double|integer|long"] |[Multivalue expression., The percentile to calculate. Must be a number between 0 and 100. Numbers out of range will return a null instead.] -mv_pseries_wei|[number, p] |[double, double] |[Multivalue expression., It is a constant number that represents the 'p' parameter in the P-Series. It impacts every element's contribution to the weighted sum.] -mv_slice |[field, start, end] |["boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version", integer, integer]|[Multivalue expression. If `null`\, the function returns `null`., Start position. If `null`\, the function returns `null`. The start argument can be negative. An index of -1 is used to specify the last value in the list., End position(included). Optional; if omitted\, the position at `start` is returned. The end argument can be negative. An index of -1 is used to specify the last value in the list.] -mv_sort |[field, order] |["boolean|date|double|integer|ip|keyword|long|text|version", keyword] |[Multivalue expression. If `null`\, the function returns `null`., Sort order. The valid options are ASC and DESC\, the default is ASC.] -mv_sum |number |"double|integer|long|unsigned_long" |Multivalue expression. -mv_zip |[string1, string2, delim] |["keyword|text", "keyword|text", "keyword|text"] |[Multivalue expression., Multivalue expression., Delimiter. Optional; if omitted\, `\,` is used as a default delimiter.] -now |null |null |null -percentile |[number, percentile] |["double|integer|long", "double|integer|long"] |[, ] -pi |null |null |null -pow |[base, exponent] |["double|integer|long|unsigned_long", "double|integer|long|unsigned_long"] |["Numeric expression for the base. If `null`\, the function returns `null`.", "Numeric expression for the exponent. 
If `null`\, the function returns `null`."] -repeat |[string, number] |["keyword|text", integer] |[String expression., Number times to repeat.] -replace |[string, regex, newString] |["keyword|text", "keyword|text", "keyword|text"] |[String expression., Regular expression., Replacement string.] -reverse |str |"keyword|text" |String expression. If `null`, the function returns `null`. -right |[string, length] |["keyword|text", integer] |[The string from which to returns a substring., The number of characters to return.] -round |[number, decimals] |["double|integer|long|unsigned_long", integer] |["The numeric value to round. If `null`\, the function returns `null`.", "The number of decimal places to round to. Defaults to 0. If `null`\, the function returns `null`."] -rtrim |string |"keyword|text" |String expression. If `null`, the function returns `null`. -signum |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." -sin |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. -sinh |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -space |number |"integer" |Number of spaces in result. -split |[string, delim] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., Delimiter. Only single byte delimiters are currently supported.] -sqrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." -st_centroid_ag|field |"geo_point|cartesian_point" |[""] -st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. 
If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] -st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] -st_distance |[geomA, geomB] |["geo_point|cartesian_point", "geo_point|cartesian_point"] |[Expression of type `geo_point` or `cartesian_point`. If `null`\, the function returns `null`., Expression of type `geo_point` or `cartesian_point`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_point` and `cartesian_point` parameters.] -st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] -st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. 
If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] -st_x |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. -st_y |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. -starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] -substring |[string, start, length] |["keyword|text", integer, integer] |[String expression. If `null`\, the function returns `null`., Start position., Length of the substring from the start position. Optional; if omitted\, all positions after `start` are returned.] -sum |number |"double|integer|long" |[""] -tan |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. -tanh |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. -tau |null |null |null -to_base64 |string |"keyword|text" |A string. -to_bool |field |"boolean|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_boolean |field |"boolean|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_cartesianpo|field |"cartesian_point|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. -to_cartesiansh|field |"cartesian_point|cartesian_shape|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. 
-to_date_nanos |field |"date|date_nanos|keyword|text|double|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_datenanos |field |"date|date_nanos|keyword|text|double|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_dateperiod |field |"date_period|keyword|text" |Input value. The input is a valid constant date period expression. -to_datetime |field |"date|date_nanos|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_dbl |field |"boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_degrees |number |"double|integer|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_double |field |"boolean|date|keyword|text|double|long|unsigned_long|integer|counter_double|counter_integer|counter_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_dt |field |"date|date_nanos|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_geopoint |field |"geo_point|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. -to_geoshape |field |"geo_point|geo_shape|keyword|text" |Input value. The input can be a single- or multi-valued column or an expression. -to_int |field |"boolean|date|keyword|text|double|long|unsigned_long|integer|counter_integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_integer |field |"boolean|date|keyword|text|double|long|unsigned_long|integer|counter_integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_ip |field |"ip|keyword|text" |Input value. 
The input can be a single- or multi-valued column or an expression. -to_long |field |"boolean|date|date_nanos|keyword|text|double|long|unsigned_long|integer|counter_integer|counter_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_lower |str |"keyword|text" |String expression. If `null`, the function returns `null`. -to_radians |number |"double|integer|long|unsigned_long" |Input value. The input can be a single- or multi-valued column or an expression. -to_str |field |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Input value. The input can be a single- or multi-valued column or an expression. -to_string |field |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |Input value. The input can be a single- or multi-valued column or an expression. -to_timeduratio|field |"time_duration|keyword|text" |Input value. The input is a valid constant time duration expression. -to_ul |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_ulong |field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_unsigned_lo|field |"boolean|date|keyword|text|double|long|unsigned_long|integer" |Input value. The input can be a single- or multi-valued column or an expression. -to_upper |str |"keyword|text" |String expression. If `null`, the function returns `null`. -to_ver |field |"keyword|text|version" |Input value. The input can be a single- or multi-valued column or an expression. -to_version |field |"keyword|text|version" |Input value. The input can be a single- or multi-valued column or an expression. 
-top |[field, limit, order] |["boolean|double|integer|long|date|ip|keyword|text", integer, keyword] |[The field to collect the top values for.,The maximum number of values to collect.,The order to calculate the top values. Either `asc` or `desc`.] -trim |string |"keyword|text" |String expression. If `null`, the function returns `null`. -values |field |"boolean|date|double|integer|ip|keyword|long|text|version" |[""] -weighted_avg |[number, weight] |["double|integer|long", "double|integer|long"] |[A numeric value., A numeric weight.] -; - -metaFunctionsDescription -required_capability: meta - - META functions -| EVAL name = SUBSTRING(name, 0, 14) -| KEEP name, description -; - - name:keyword | description:keyword -abs |Returns the absolute value. -acos |Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians. -asin |Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input numeric expression as an angle, expressed in radians. -atan |Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input numeric expression as an angle, expressed in radians. -atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the ray from the origin to the point (x , y) in the Cartesian plane, expressed in radians. -avg |The average of a numeric field. -bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. 
If the number of arguments is even, and no condition matches, the function returns `null`. -cbrt |Returns the cube root of a number. The input can be any numeric value, the return value is always a double. Cube roots of infinities are null. -ceil |Round a number up to the nearest integer. -cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. -coalesce |Returns the first of its arguments that is not null. If all arguments are null, it returns `null`. -concat |Concatenates two or more strings. -cos |Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle. -cosh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of a number. -count |Returns the total number (count) of input values. -count_distinct|Returns the approximate number of distinct values. -date_diff |Subtracts the `startTimestamp` from the `endTimestamp` and returns the difference in multiples of `unit`. If `startTimestamp` is later than the `endTimestamp`, negative values are returned. -date_extract |Extracts parts of a date, like year, month, day, hour. -date_format |Returns a string representation of a date, in the provided format. -date_parse |Returns a date by parsing the second argument using the format specified in the first argument. -date_trunc |Rounds down a date to the closest interval. -e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. -ends_with |Returns a boolean that indicates whether a keyword string ends with another string. -exp |Returns the value of e raised to the power of the given number. -floor |Round a number down to the nearest integer. -from_base64 |Decode a base64 string. -greatest |Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. -ip_prefix |Truncates an IP to a given prefix length. -least |Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. 
-left |Returns the substring that extracts 'length' chars from 'string' starting from the left. -length |Returns the character length of a string. -locate |Returns an integer that indicates the position of a keyword substring within another string. Returns `0` if the substring cannot be found. Note that string positions start from `1`. -log |Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. Logs of zero, negative numbers, and base of one return `null` as well as a warning. -log10 |Returns the logarithm of a value to base 10. The input can be any numeric value, the return value is always a double. Logs of 0 and negative numbers return `null` as well as a warning. -ltrim |Removes leading whitespaces from a string. -max |The maximum value of a field. -median |The value that is greater than half of all values and less than half of all values, also known as the 50% <>. -median_absolut|"Returns the median absolute deviation, a measure of variability. It is a robust statistic, meaning that it is useful for describing data that may have outliers, or may not be normally distributed. For such data it can be more descriptive than standard deviation. It is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`." -min |The minimum value of a field. -mv_append |Concatenates values of two multi-value fields. -mv_avg |Converts a multivalued field into a single valued field containing the average of all of the values. -mv_concat |Converts a multivalued string expression into a single valued column containing the concatenation of all values separated by a delimiter. -mv_count |Converts a multivalued expression into a single valued column containing a count of the number of values. -mv_dedupe |Remove duplicate values from a multivalued field. 
-mv_first |Converts a multivalued expression into a single valued column containing the first value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the minimum value use <> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a performance benefit to `MV_FIRST`. -mv_last |Converts a multivalue expression into a single valued column containing the last value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the maximum value use <> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a performance benefit to `MV_LAST`. -mv_max |Converts a multivalued expression into a single valued column containing the maximum value. -mv_median |Converts a multivalued field into a single valued field containing the median value. -mv_median_abso|"Converts a multivalued field into a single valued field containing the median absolute deviation. It is calculated as the median of each data point's deviation from the median of the entire sample. That is, for a random variable `X`, the median absolute deviation is `median(|median(X) - X|)`." -mv_min |Converts a multivalued expression into a single valued column containing the minimum value. -mv_percentile |Converts a multivalued field into a single valued field containing the value at which a certain percentage of observed values occur. -mv_pseries_wei|Converts a multivalued expression into a single-valued column by multiplying every element on the input list by its corresponding term in P-Series and computing the sum. 
-mv_slice |Returns a subset of the multivalued field using the start and end index values. -mv_sort |Sorts a multivalued field in lexicographical order. -mv_sum |Converts a multivalued field into a single valued field containing the sum of all of the values. -mv_zip |Combines the values from two multivalued fields with a delimiter that joins them together. -now |Returns current date and time. -percentile |Returns the value at which a certain percentage of observed values occur. For example, the 95th percentile is the value which is greater than 95% of the observed values and the 50th percentile is the `MEDIAN`. -pi |Returns {wikipedia}/Pi[Pi], the ratio of a circle's circumference to its diameter. -pow |Returns the value of `base` raised to the power of `exponent`. -repeat |Returns a string constructed by concatenating `string` with itself the specified `number` of times. -replace |The function substitutes in the string `str` any match of the regular expression `regex` with the replacement string `newStr`. -reverse |Returns a new string representing the input string in reverse order. -right |Return the substring that extracts 'length' chars from 'str' starting from the right. -round |Rounds a number to the specified number of decimal places. Defaults to 0, which returns the nearest integer. If the precision is a negative number, rounds to the number of digits left of the decimal point. -rtrim |Removes trailing whitespaces from a string. -signum |Returns the sign of the given number. It returns `-1` for negative numbers, `0` for `0` and `1` for positive numbers. -sin |Returns the {wikipedia}/Sine_and_cosine[sine] of an angle. -sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of a number. -space |Returns a string made of `number` spaces. -split |Split a single valued string into multiple strings. -sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. 
Square roots of negative numbers and infinities are null. -st_centroid_ag|Calculate the spatial centroid over a field with spatial point geometry type. -st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. -st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ -st_distance |Computes the distance between two points. For cartesian geometries, this is the pythagorean distance in the same units as the original coordinates. For geographic geometries, this is the circular distance along the great circle in meters. -st_intersects |Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ -st_within |Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. -st_x |Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. -st_y |Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. -starts_with |Returns a boolean that indicates whether a keyword string starts with another string. -substring |Returns a substring of a string, specified by a start position and an optional length. -sum |The sum of a numeric expression. -tan |Returns the {wikipedia}/Sine_and_cosine[tangent] of an angle. -tanh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic tangent] of a number. -tau |Returns the https://tauday.com/tau-manifesto[ratio] of a circle's circumference to its radius. -to_base64 |Encode a string to a base64 string. -to_bool |Converts an input value to a boolean value. 
A string value of *true* will be case-insensitive converted to the Boolean *true*. For anything else, including the empty string, the function will return *false*. The numerical value of *0* will be converted to *false*, anything else will be converted to *true*. -to_boolean |Converts an input value to a boolean value. A string value of *true* will be case-insensitive converted to the Boolean *true*. For anything else, including the empty string, the function will return *false*. The numerical value of *0* will be converted to *false*, anything else will be converted to *true*. -to_cartesianpo|Converts an input value to a `cartesian_point` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. -to_cartesiansh|Converts an input value to a `cartesian_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -to_date_nanos |Converts an input to a nanosecond-resolution date value (aka date_nanos). -to_datenanos |Converts an input to a nanosecond-resolution date value (aka date_nanos). -to_dateperiod |Converts an input value into a `date_period` value. -to_datetime |Converts an input value to a date value. A string will only be successfully converted if it's respecting the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`. To convert dates in other formats, use <>. -to_dbl |Converts an input value to a double value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to double. Boolean *true* will be converted to double *1.0*, *false* to *0.0*. -to_degrees |Converts a number in {wikipedia}/Radian[radians] to {wikipedia}/Degree_(angle)[degrees]. -to_double |Converts an input value to a double value. 
If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to double. Boolean *true* will be converted to double *1.0*, *false* to *0.0*. -to_dt |Converts an input value to a date value. A string will only be successfully converted if it's respecting the format `yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`. To convert dates in other formats, use <>. -to_geopoint |Converts an input value to a `geo_point` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT Point] format. -to_geoshape |Converts an input value to a `geo_shape` value. A string will only be successfully converted if it respects the {wikipedia}/Well-known_text_representation_of_geometry[WKT] format. -to_int |Converts an input value to an integer value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to integer. Boolean *true* will be converted to integer *1*, *false* to *0*. -to_integer |Converts an input value to an integer value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to integer. Boolean *true* will be converted to integer *1*, *false* to *0*. -to_ip |Converts an input string to an IP value. -to_long |Converts an input value to a long value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to long. Boolean *true* will be converted to long *1*, *false* to *0*. -to_lower |Returns a new string representing the input string converted to lower case. -to_radians |Converts a number in {wikipedia}/Degree_(angle)[degrees] to {wikipedia}/Radian[radians]. -to_str |Converts an input value into a string. -to_string |Converts an input value into a string. 
-to_timeduratio|Converts an input value into a `time_duration` value. -to_ul |Converts an input value to an unsigned long value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to unsigned long. Boolean *true* will be converted to unsigned long *1*, *false* to *0*. -to_ulong |Converts an input value to an unsigned long value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to unsigned long. Boolean *true* will be converted to unsigned long *1*, *false* to *0*. -to_unsigned_lo|Converts an input value to an unsigned long value. If the input parameter is of a date type, its value will be interpreted as milliseconds since the {wikipedia}/Unix_time[Unix epoch], converted to unsigned long. Boolean *true* will be converted to unsigned long *1*, *false* to *0*. -to_upper |Returns a new string representing the input string converted to upper case. -to_ver |Converts an input string to a version value. -to_version |Converts an input string to a version value. -top |Collects the top values for a field. Includes repeated values. -trim |Removes leading and trailing whitespaces from a string. -values |Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. If you need the values returned in order use <>. -weighted_avg |The weighted average of a numeric expression. 
-; - -metaFunctionsRemaining -required_capability: meta -required_capability: date_nanos_type - - META functions -| EVAL name = SUBSTRING(name, 0, 14) -| KEEP name, * -| DROP synopsis, description, argNames, argTypes, argDescriptions -; - - name:keyword | returnType:keyword | optionalArgs:boolean |variadic:boolean|isAggregation:boolean -abs |"double|integer|long|unsigned_long" |false |false |false -acos |double |false |false |false -asin |double |false |false |false -atan |double |false |false |false -atan2 |double |[false, false] |false |false -avg |double |false |false |true -bin |"double|date" |[false, false, true, true] |false |false -bucket |"double|date" |[false, false, true, true] |false |false -case |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" |[false, false] |true |false -cbrt |double |false |false |false -ceil |"double|integer|long|unsigned_long" |false |false |false -cidr_match |boolean |[false, false] |true |false -coalesce |"boolean|cartesian_point|cartesian_shape|date|geo_point|geo_shape|integer|ip|keyword|long|text|version" |false |true |false -concat |keyword |[false, false] |true |false -cos |double |false |false |false -cosh |double |false |false |false -count |long |true |false |true -count_distinct|long |[false, true] |false |true -date_diff |integer |[false, false, false] |false |false -date_extract |long |[false, false] |false |false -date_format |keyword |[true, false] |false |false -date_parse |date |[true, false] |false |false -date_trunc |date |[false, false] |false |false -e |double |null |false |false -ends_with |boolean |[false, false] |false |false -exp |double |false |false |false -floor |"double|integer|long|unsigned_long" |false |false |false -from_base64 |keyword |false |false |false -greatest |"boolean|date|double|integer|ip|keyword|long|text|version" |false |true |false -ip_prefix |ip |[false, false, false] |false |false -least 
|"boolean|date|double|integer|ip|keyword|long|text|version" |false |true |false -left |keyword |[false, false] |false |false -length |integer |false |false |false -locate |integer |[false, false, true] |false |false -log |double |[true, false] |false |false -log10 |double |false |false |false -ltrim |"keyword|text" |false |false |false -max |"boolean|double|integer|long|date|ip|keyword|text|long|version" |false |false |true -median |double |false |false |true -median_absolut|double |false |false |true -min |"boolean|double|integer|long|date|ip|keyword|text|long|version" |false |false |true -mv_append |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |[false, false] |false |false -mv_avg |double |false |false |false -mv_concat |keyword |[false, false] |false |false -mv_count |integer |false |false |false -mv_dedupe |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |false |false |false -mv_first |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false -mv_last |"boolean|cartesian_point|cartesian_shape|date|date_nanos|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version"|false |false |false -mv_max |"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version" |false |false |false -mv_median |"double|integer|long|unsigned_long" |false |false |false -mv_median_abso|"double|integer|long|unsigned_long" |false |false |false -mv_min |"boolean|date|date_nanos|double|integer|ip|keyword|long|text|unsigned_long|version" |false |false |false -mv_percentile |"double|integer|long" |[false, false] |false |false -mv_pseries_wei|"double" |[false, false] |false |false -mv_slice |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|version" |[false, false, true] |false 
|false -mv_sort |"boolean|date|double|integer|ip|keyword|long|text|version" |[false, true] |false |false -mv_sum |"double|integer|long|unsigned_long" |false |false |false -mv_zip |keyword |[false, false, true] |false |false -now |date |null |false |false -percentile |double |[false, false] |false |true -pi |double |null |false |false -pow |double |[false, false] |false |false -repeat |keyword |[false, false] |false |false -replace |keyword |[false, false, false] |false |false -reverse |"keyword|text" |false |false |false -right |keyword |[false, false] |false |false -round |"double|integer|long|unsigned_long" |[false, true] |false |false -rtrim |"keyword|text" |false |false |false -signum |double |false |false |false -sin |double |false |false |false -sinh |double |false |false |false -space |keyword |false |false |false -split |keyword |[false, false] |false |false -sqrt |double |false |false |false -st_centroid_ag|"geo_point|cartesian_point" |false |false |true -st_contains |boolean |[false, false] |false |false -st_disjoint |boolean |[false, false] |false |false -st_distance |double |[false, false] |false |false -st_intersects |boolean |[false, false] |false |false -st_within |boolean |[false, false] |false |false -st_x |double |false |false |false -st_y |double |false |false |false -starts_with |boolean |[false, false] |false |false -substring |keyword |[false, false, true] |false |false -sum |"long|double" |false |false |true -tan |double |false |false |false -tanh |double |false |false |false -tau |double |null |false |false -to_base64 |keyword |false |false |false -to_bool |boolean |false |false |false -to_boolean |boolean |false |false |false -to_cartesianpo|cartesian_point |false |false |false -to_cartesiansh|cartesian_shape |false |false |false -to_date_nanos |date_nanos |false |false |false -to_datenanos |date_nanos |false |false |false -to_dateperiod |date_period |false |false |false -to_datetime |date |false |false |false -to_dbl |double |false |false 
|false -to_degrees |double |false |false |false -to_double |double |false |false |false -to_dt |date |false |false |false -to_geopoint |geo_point |false |false |false -to_geoshape |geo_shape |false |false |false -to_int |integer |false |false |false -to_integer |integer |false |false |false -to_ip |ip |false |false |false -to_long |long |false |false |false -to_lower |"keyword|text" |false |false |false -to_radians |double |false |false |false -to_str |keyword |false |false |false -to_string |keyword |false |false |false -to_timeduratio|time_duration |false |false |false -to_ul |unsigned_long |false |false |false -to_ulong |unsigned_long |false |false |false -to_unsigned_lo|unsigned_long |false |false |false -to_upper |"keyword|text" |false |false |false -to_ver |version |false |false |false -to_version |version |false |false |false -top |"boolean|double|integer|long|date|ip|keyword|text" |[false, false, false] |false |true -trim |"keyword|text" |false |false |false -values |"boolean|date|double|integer|ip|keyword|long|text|version" |false |false |true -weighted_avg |"double" |[false, false] |false |true -; - -metaFunctionsFiltered -required_capability: meta - -META FUNCTIONS -| WHERE STARTS_WITH(name, "sin") -; - -name:keyword | synopsis:keyword |argNames:keyword | argTypes:keyword | argDescriptions:keyword | returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean | isAggregation:boolean -sin |"double sin(angle:double|integer|long|unsigned_long)" |angle |"double|integer|long|unsigned_long" | "An angle, in radians. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Sine_and_cosine[sine] of an angle." | false | false | false -sinh |"double sinh(number:double|integer|long|unsigned_long)" |number |"double|integer|long|unsigned_long" | "Numeric expression. If `null`, the function returns `null`." | double | "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of a number." 
| false | false | false -; - -countFunctions -required_capability: meta - -meta functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; - -a:long | b:long | c:long -122 | 122 | 122 -; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index e0bef22718d0..147b13b36c44 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -34,7 +34,6 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; @@ -1038,29 +1037,6 @@ public class EsqlActionIT extends AbstractEsqlIntegTestCase { } } - public void testMetaFunctions() { - try (EsqlQueryResponse results = run("meta functions")) { - assertThat( - results.columns(), - equalTo( - List.of( - new ColumnInfoImpl("name", "keyword"), - new ColumnInfoImpl("synopsis", "keyword"), - new ColumnInfoImpl("argNames", "keyword"), - new ColumnInfoImpl("argTypes", "keyword"), - new ColumnInfoImpl("argDescriptions", "keyword"), - new ColumnInfoImpl("returnType", "keyword"), - new ColumnInfoImpl("description", "keyword"), - new ColumnInfoImpl("optionalArgs", "boolean"), - new ColumnInfoImpl("variadic", "boolean"), - new ColumnInfoImpl("isAggregation", "boolean") - ) - ) - ); - assertThat(getValuesList(results).size(), equalTo(new EsqlFunctionRegistry().listFunctions().size())); - } - } - public void 
testInWithNullValue() { try (EsqlQueryResponse results = run("from test | where null in (data, 2) | keep data")) { assertThat(results.columns(), equalTo(List.of(new ColumnInfoImpl("data", "long")))); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 6570a2546997..e7f10d96c89a 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -66,7 +66,6 @@ FROM : 'from' -> pushMode(FROM_MODE); GROK : 'grok' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); -META : 'meta' -> pushMode(META_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -467,26 +466,6 @@ SHOW_WS : WS -> channel(HIDDEN) ; -// -// META commands -// -mode META_MODE; -META_PIPE : PIPE -> type(PIPE), popMode; - -FUNCTIONS : 'functions'; - -META_LINE_COMMENT - : LINE_COMMENT -> channel(HIDDEN) - ; - -META_MULTILINE_COMMENT - : MULTILINE_COMMENT -> channel(HIDDEN) - ; - -META_WS - : WS -> channel(HIDDEN) - ; - mode SETTING_MODE; SETTING_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 747fbbc64cf5..4fd37ab9900f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -7,122 +7,117 @@ FROM=6 GROK=7 KEEP=8 LIMIT=9 -META=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -DEV_INLINESTATS=18 -DEV_LOOKUP=19 -DEV_MATCH=20 -DEV_METRICS=21 -UNKNOWN_CMD=22 -LINE_COMMENT=23 -MULTILINE_COMMENT=24 -WS=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -IN=40 -IS=41 -LAST=42 
-LIKE=43 -LP=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -NAMED_OR_POSITIONAL_PARAM=65 -OPENING_BRACKET=66 -CLOSING_BRACKET=67 -UNQUOTED_IDENTIFIER=68 -QUOTED_IDENTIFIER=69 -EXPR_LINE_COMMENT=70 -EXPR_MULTILINE_COMMENT=71 -EXPR_WS=72 -EXPLAIN_WS=73 -EXPLAIN_LINE_COMMENT=74 -EXPLAIN_MULTILINE_COMMENT=75 -METADATA=76 -UNQUOTED_SOURCE=77 -FROM_LINE_COMMENT=78 -FROM_MULTILINE_COMMENT=79 -FROM_WS=80 -ID_PATTERN=81 -PROJECT_LINE_COMMENT=82 -PROJECT_MULTILINE_COMMENT=83 -PROJECT_WS=84 -AS=85 -RENAME_LINE_COMMENT=86 -RENAME_MULTILINE_COMMENT=87 -RENAME_WS=88 -ON=89 -WITH=90 -ENRICH_POLICY_NAME=91 -ENRICH_LINE_COMMENT=92 -ENRICH_MULTILINE_COMMENT=93 -ENRICH_WS=94 -ENRICH_FIELD_LINE_COMMENT=95 -ENRICH_FIELD_MULTILINE_COMMENT=96 -ENRICH_FIELD_WS=97 -MVEXPAND_LINE_COMMENT=98 -MVEXPAND_MULTILINE_COMMENT=99 -MVEXPAND_WS=100 -INFO=101 -SHOW_LINE_COMMENT=102 -SHOW_MULTILINE_COMMENT=103 -SHOW_WS=104 -FUNCTIONS=105 -META_LINE_COMMENT=106 -META_MULTILINE_COMMENT=107 -META_WS=108 -COLON=109 -SETTING=110 -SETTING_LINE_COMMENT=111 -SETTTING_MULTILINE_COMMENT=112 -SETTING_WS=113 -LOOKUP_LINE_COMMENT=114 -LOOKUP_MULTILINE_COMMENT=115 -LOOKUP_WS=116 -LOOKUP_FIELD_LINE_COMMENT=117 -LOOKUP_FIELD_MULTILINE_COMMENT=118 -LOOKUP_FIELD_WS=119 -METRICS_LINE_COMMENT=120 -METRICS_MULTILINE_COMMENT=121 -METRICS_WS=122 -CLOSING_METRICS_LINE_COMMENT=123 -CLOSING_METRICS_MULTILINE_COMMENT=124 -CLOSING_METRICS_WS=125 +MV_EXPAND=10 +RENAME=11 +ROW=12 +SHOW=13 +SORT=14 +STATS=15 +WHERE=16 +DEV_INLINESTATS=17 +DEV_LOOKUP=18 +DEV_MATCH=19 +DEV_METRICS=20 +UNKNOWN_CMD=21 +LINE_COMMENT=22 +MULTILINE_COMMENT=23 +WS=24 +PIPE=25 +QUOTED_STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +CAST_OP=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +IN=39 +IS=40 +LAST=41 +LIKE=42 +LP=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 
+TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +NAMED_OR_POSITIONAL_PARAM=64 +OPENING_BRACKET=65 +CLOSING_BRACKET=66 +UNQUOTED_IDENTIFIER=67 +QUOTED_IDENTIFIER=68 +EXPR_LINE_COMMENT=69 +EXPR_MULTILINE_COMMENT=70 +EXPR_WS=71 +EXPLAIN_WS=72 +EXPLAIN_LINE_COMMENT=73 +EXPLAIN_MULTILINE_COMMENT=74 +METADATA=75 +UNQUOTED_SOURCE=76 +FROM_LINE_COMMENT=77 +FROM_MULTILINE_COMMENT=78 +FROM_WS=79 +ID_PATTERN=80 +PROJECT_LINE_COMMENT=81 +PROJECT_MULTILINE_COMMENT=82 +PROJECT_WS=83 +AS=84 +RENAME_LINE_COMMENT=85 +RENAME_MULTILINE_COMMENT=86 +RENAME_WS=87 +ON=88 +WITH=89 +ENRICH_POLICY_NAME=90 +ENRICH_LINE_COMMENT=91 +ENRICH_MULTILINE_COMMENT=92 +ENRICH_WS=93 +ENRICH_FIELD_LINE_COMMENT=94 +ENRICH_FIELD_MULTILINE_COMMENT=95 +ENRICH_FIELD_WS=96 +MVEXPAND_LINE_COMMENT=97 +MVEXPAND_MULTILINE_COMMENT=98 +MVEXPAND_WS=99 +INFO=100 +SHOW_LINE_COMMENT=101 +SHOW_MULTILINE_COMMENT=102 +SHOW_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 +LOOKUP_LINE_COMMENT=109 +LOOKUP_MULTILINE_COMMENT=110 +LOOKUP_WS=111 +LOOKUP_FIELD_LINE_COMMENT=112 +LOOKUP_FIELD_MULTILINE_COMMENT=113 +LOOKUP_FIELD_WS=114 +METRICS_LINE_COMMENT=115 +METRICS_MULTILINE_COMMENT=116 +METRICS_WS=117 +CLOSING_METRICS_LINE_COMMENT=118 +CLOSING_METRICS_MULTILINE_COMMENT=119 +CLOSING_METRICS_WS=120 'dissect'=1 'drop'=2 'enrich'=3 @@ -132,55 +127,53 @@ CLOSING_METRICS_WS=125 'grok'=7 'keep'=8 'limit'=9 -'meta'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'in'=40 -'is'=41 -'last'=42 -'like'=43 -'('=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=67 -'metadata'=76 -'as'=85 -'on'=89 -'with'=90 -'info'=101 -'functions'=105 
-':'=109 +'mv_expand'=10 +'rename'=11 +'row'=12 +'show'=13 +'sort'=14 +'stats'=15 +'where'=16 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +'::'=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'in'=39 +'is'=40 +'last'=41 +'like'=42 +'('=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=66 +'metadata'=75 +'as'=84 +'on'=88 +'with'=89 +'info'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index a5691a16ca50..eefe352e9cdd 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -32,7 +32,6 @@ query sourceCommand : explainCommand | fromCommand - | metaCommand | rowCommand | showCommand // in development @@ -289,10 +288,6 @@ showCommand : SHOW INFO #showInfo ; -metaCommand - : META FUNCTIONS #metaFunctions - ; - enrichCommand : ENRICH policyName=ENRICH_POLICY_NAME (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? 
; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 747fbbc64cf5..4fd37ab9900f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -7,122 +7,117 @@ FROM=6 GROK=7 KEEP=8 LIMIT=9 -META=10 -MV_EXPAND=11 -RENAME=12 -ROW=13 -SHOW=14 -SORT=15 -STATS=16 -WHERE=17 -DEV_INLINESTATS=18 -DEV_LOOKUP=19 -DEV_MATCH=20 -DEV_METRICS=21 -UNKNOWN_CMD=22 -LINE_COMMENT=23 -MULTILINE_COMMENT=24 -WS=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -IN=40 -IS=41 -LAST=42 -LIKE=43 -LP=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -NAMED_OR_POSITIONAL_PARAM=65 -OPENING_BRACKET=66 -CLOSING_BRACKET=67 -UNQUOTED_IDENTIFIER=68 -QUOTED_IDENTIFIER=69 -EXPR_LINE_COMMENT=70 -EXPR_MULTILINE_COMMENT=71 -EXPR_WS=72 -EXPLAIN_WS=73 -EXPLAIN_LINE_COMMENT=74 -EXPLAIN_MULTILINE_COMMENT=75 -METADATA=76 -UNQUOTED_SOURCE=77 -FROM_LINE_COMMENT=78 -FROM_MULTILINE_COMMENT=79 -FROM_WS=80 -ID_PATTERN=81 -PROJECT_LINE_COMMENT=82 -PROJECT_MULTILINE_COMMENT=83 -PROJECT_WS=84 -AS=85 -RENAME_LINE_COMMENT=86 -RENAME_MULTILINE_COMMENT=87 -RENAME_WS=88 -ON=89 -WITH=90 -ENRICH_POLICY_NAME=91 -ENRICH_LINE_COMMENT=92 -ENRICH_MULTILINE_COMMENT=93 -ENRICH_WS=94 -ENRICH_FIELD_LINE_COMMENT=95 -ENRICH_FIELD_MULTILINE_COMMENT=96 -ENRICH_FIELD_WS=97 -MVEXPAND_LINE_COMMENT=98 -MVEXPAND_MULTILINE_COMMENT=99 -MVEXPAND_WS=100 -INFO=101 -SHOW_LINE_COMMENT=102 -SHOW_MULTILINE_COMMENT=103 -SHOW_WS=104 -FUNCTIONS=105 -META_LINE_COMMENT=106 -META_MULTILINE_COMMENT=107 -META_WS=108 -COLON=109 -SETTING=110 -SETTING_LINE_COMMENT=111 -SETTTING_MULTILINE_COMMENT=112 -SETTING_WS=113 -LOOKUP_LINE_COMMENT=114 -LOOKUP_MULTILINE_COMMENT=115 
-LOOKUP_WS=116 -LOOKUP_FIELD_LINE_COMMENT=117 -LOOKUP_FIELD_MULTILINE_COMMENT=118 -LOOKUP_FIELD_WS=119 -METRICS_LINE_COMMENT=120 -METRICS_MULTILINE_COMMENT=121 -METRICS_WS=122 -CLOSING_METRICS_LINE_COMMENT=123 -CLOSING_METRICS_MULTILINE_COMMENT=124 -CLOSING_METRICS_WS=125 +MV_EXPAND=10 +RENAME=11 +ROW=12 +SHOW=13 +SORT=14 +STATS=15 +WHERE=16 +DEV_INLINESTATS=17 +DEV_LOOKUP=18 +DEV_MATCH=19 +DEV_METRICS=20 +UNKNOWN_CMD=21 +LINE_COMMENT=22 +MULTILINE_COMMENT=23 +WS=24 +PIPE=25 +QUOTED_STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +CAST_OP=33 +COMMA=34 +DESC=35 +DOT=36 +FALSE=37 +FIRST=38 +IN=39 +IS=40 +LAST=41 +LIKE=42 +LP=43 +NOT=44 +NULL=45 +NULLS=46 +OR=47 +PARAM=48 +RLIKE=49 +RP=50 +TRUE=51 +EQ=52 +CIEQ=53 +NEQ=54 +LT=55 +LTE=56 +GT=57 +GTE=58 +PLUS=59 +MINUS=60 +ASTERISK=61 +SLASH=62 +PERCENT=63 +NAMED_OR_POSITIONAL_PARAM=64 +OPENING_BRACKET=65 +CLOSING_BRACKET=66 +UNQUOTED_IDENTIFIER=67 +QUOTED_IDENTIFIER=68 +EXPR_LINE_COMMENT=69 +EXPR_MULTILINE_COMMENT=70 +EXPR_WS=71 +EXPLAIN_WS=72 +EXPLAIN_LINE_COMMENT=73 +EXPLAIN_MULTILINE_COMMENT=74 +METADATA=75 +UNQUOTED_SOURCE=76 +FROM_LINE_COMMENT=77 +FROM_MULTILINE_COMMENT=78 +FROM_WS=79 +ID_PATTERN=80 +PROJECT_LINE_COMMENT=81 +PROJECT_MULTILINE_COMMENT=82 +PROJECT_WS=83 +AS=84 +RENAME_LINE_COMMENT=85 +RENAME_MULTILINE_COMMENT=86 +RENAME_WS=87 +ON=88 +WITH=89 +ENRICH_POLICY_NAME=90 +ENRICH_LINE_COMMENT=91 +ENRICH_MULTILINE_COMMENT=92 +ENRICH_WS=93 +ENRICH_FIELD_LINE_COMMENT=94 +ENRICH_FIELD_MULTILINE_COMMENT=95 +ENRICH_FIELD_WS=96 +MVEXPAND_LINE_COMMENT=97 +MVEXPAND_MULTILINE_COMMENT=98 +MVEXPAND_WS=99 +INFO=100 +SHOW_LINE_COMMENT=101 +SHOW_MULTILINE_COMMENT=102 +SHOW_WS=103 +COLON=104 +SETTING=105 +SETTING_LINE_COMMENT=106 +SETTTING_MULTILINE_COMMENT=107 +SETTING_WS=108 +LOOKUP_LINE_COMMENT=109 +LOOKUP_MULTILINE_COMMENT=110 +LOOKUP_WS=111 +LOOKUP_FIELD_LINE_COMMENT=112 +LOOKUP_FIELD_MULTILINE_COMMENT=113 +LOOKUP_FIELD_WS=114 +METRICS_LINE_COMMENT=115 +METRICS_MULTILINE_COMMENT=116 
+METRICS_WS=117 +CLOSING_METRICS_LINE_COMMENT=118 +CLOSING_METRICS_MULTILINE_COMMENT=119 +CLOSING_METRICS_WS=120 'dissect'=1 'drop'=2 'enrich'=3 @@ -132,55 +127,53 @@ CLOSING_METRICS_WS=125 'grok'=7 'keep'=8 'limit'=9 -'meta'=10 -'mv_expand'=11 -'rename'=12 -'row'=13 -'show'=14 -'sort'=15 -'stats'=16 -'where'=17 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'in'=40 -'is'=41 -'last'=42 -'like'=43 -'('=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=67 -'metadata'=76 -'as'=85 -'on'=89 -'with'=90 -'info'=101 -'functions'=105 -':'=109 +'mv_expand'=10 +'rename'=11 +'row'=12 +'show'=13 +'sort'=14 +'stats'=15 +'where'=16 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +'::'=33 +','=34 +'desc'=35 +'.'=36 +'false'=37 +'first'=38 +'in'=39 +'is'=40 +'last'=41 +'like'=42 +'('=43 +'not'=44 +'null'=45 +'nulls'=46 +'or'=47 +'?'=48 +'rlike'=49 +')'=50 +'true'=51 +'=='=52 +'=~'=53 +'!='=54 +'<'=55 +'<='=56 +'>'=57 +'>='=58 +'+'=59 +'-'=60 +'*'=61 +'/'=62 +'%'=63 +']'=66 +'metadata'=75 +'as'=84 +'on'=88 +'with'=89 +'info'=100 +':'=104 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c39a2041a61b..5b4428ee2411 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -262,11 +262,9 @@ public class EsqlCapabilities { MATCH_OPERATOR(true), /** - * Support for the {@code META} keyword. Tests with this tag are - * intentionally excluded from mixed version clusters because we - * continually add functions, so they constantly fail if we don't. + * Removing support for the {@code META} keyword. 
*/ - META, + NO_META, /** * Add CombineBinaryComparisons rule. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index 7c0f1fa3a8ad..8e238f9ed760 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -133,7 +133,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim; -import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.session.Configuration; import java.lang.reflect.Constructor; @@ -450,31 +449,6 @@ public class EsqlFunctionRegistry { boolean variadic, boolean isAggregation ) { - public String fullSignature() { - StringBuilder builder = new StringBuilder(); - builder.append(MetaFunctions.withPipes(returnType)); - builder.append(" "); - builder.append(name); - builder.append("("); - for (int i = 0; i < args.size(); i++) { - ArgSignature arg = args.get(i); - if (i > 0) { - builder.append(", "); - } - if (arg.optional()) { - builder.append("?"); - } - builder.append(arg.name()); - if (i == args.size() - 1 && variadic) { - builder.append("..."); - } - builder.append(":"); - builder.append(MetaFunctions.withPipes(arg.type())); - } - builder.append(")"); - return builder.toString(); - } - /** * The name of every argument. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 8122a5688428..4f3a843ee09d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -9,7 +9,6 @@ null 'grok' 'keep' 'limit' -'meta' 'mv_expand' 'rename' 'row' @@ -104,10 +103,6 @@ null null null null -'functions' -null -null -null ':' null null @@ -137,7 +132,6 @@ FROM GROK KEEP LIMIT -META MV_EXPAND RENAME ROW @@ -232,10 +226,6 @@ INFO SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS -FUNCTIONS -META_LINE_COMMENT -META_MULTILINE_COMMENT -META_WS COLON SETTING SETTING_LINE_COMMENT @@ -264,7 +254,6 @@ FROM GROK KEEP LIMIT -META MV_EXPAND RENAME ROW @@ -408,11 +397,6 @@ INFO SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS -META_PIPE -FUNCTIONS -META_LINE_COMMENT -META_MULTILINE_COMMENT -META_WS SETTING_CLOSING_BRACKET COLON SETTING @@ -467,7 +451,6 @@ ENRICH_MODE ENRICH_FIELD_MODE MVEXPAND_MODE SHOW_MODE -META_MODE SETTING_MODE LOOKUP_MODE LOOKUP_FIELD_MODE @@ -475,4 +458,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 125, 1474, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 
43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 
175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 591, 8, 21, 11, 21, 12, 21, 592, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 601, 8, 22, 10, 22, 12, 22, 604, 9, 22, 1, 22, 3, 22, 607, 8, 22, 1, 22, 3, 22, 610, 8, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 619, 8, 23, 10, 23, 12, 23, 622, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 630, 8, 24, 11, 24, 12, 24, 631, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 
3, 30, 651, 8, 30, 1, 30, 4, 30, 654, 8, 30, 11, 30, 12, 30, 655, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 665, 8, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 672, 8, 35, 1, 36, 1, 36, 1, 36, 5, 36, 677, 8, 36, 10, 36, 12, 36, 680, 9, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 688, 8, 36, 10, 36, 12, 36, 691, 9, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 3, 36, 698, 8, 36, 1, 36, 3, 36, 701, 8, 36, 3, 36, 703, 8, 36, 1, 37, 4, 37, 706, 8, 37, 11, 37, 12, 37, 707, 1, 38, 4, 38, 711, 8, 38, 11, 38, 12, 38, 712, 1, 38, 1, 38, 5, 38, 717, 8, 38, 10, 38, 12, 38, 720, 9, 38, 1, 38, 1, 38, 4, 38, 724, 8, 38, 11, 38, 12, 38, 725, 1, 38, 4, 38, 729, 8, 38, 11, 38, 12, 38, 730, 1, 38, 1, 38, 5, 38, 735, 8, 38, 10, 38, 12, 38, 738, 9, 38, 3, 38, 740, 8, 38, 1, 38, 1, 38, 1, 38, 1, 38, 4, 38, 746, 8, 38, 11, 38, 12, 38, 747, 1, 38, 1, 38, 3, 38, 752, 8, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 3, 75, 879, 8, 75, 1, 75, 5, 75, 882, 8, 75, 10, 75, 12, 75, 885, 9, 75, 1, 75, 1, 75, 4, 75, 889, 8, 75, 11, 75, 12, 75, 890, 3, 75, 893, 8, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 907, 
8, 78, 10, 78, 12, 78, 910, 9, 78, 1, 78, 1, 78, 3, 78, 914, 8, 78, 1, 78, 4, 78, 917, 8, 78, 11, 78, 12, 78, 918, 3, 78, 921, 8, 78, 1, 79, 1, 79, 4, 79, 925, 8, 79, 11, 79, 12, 79, 926, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 3, 96, 1004, 8, 96, 1, 97, 4, 97, 1007, 8, 97, 11, 97, 12, 97, 1008, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 3, 106, 1048, 8, 106, 1, 107, 1, 107, 3, 107, 1052, 8, 107, 1, 107, 5, 107, 1055, 8, 107, 10, 107, 12, 107, 1058, 9, 107, 1, 107, 1, 107, 3, 107, 1062, 8, 107, 1, 107, 4, 107, 1065, 8, 107, 11, 107, 12, 107, 1066, 3, 107, 1069, 8, 107, 1, 108, 1, 108, 4, 108, 1073, 8, 108, 11, 108, 12, 108, 1074, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 126, 4, 126, 1150, 8, 126, 11, 126, 12, 126, 1151, 
1, 126, 1, 126, 3, 126, 1156, 8, 126, 1, 126, 4, 126, 1159, 8, 126, 11, 126, 12, 126, 1160, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 4, 160, 1311, 8, 160, 11, 160, 12, 160, 1312, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 
181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 195, 2, 620, 689, 0, 196, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 0, 166, 65, 168, 66, 170, 67, 172, 68, 174, 0, 176, 69, 178, 70, 180, 71, 182, 72, 184, 0, 186, 0, 188, 73, 190, 74, 192, 75, 194, 0, 196, 0, 198, 0, 200, 0, 202, 0, 204, 0, 206, 76, 208, 0, 210, 77, 212, 0, 214, 0, 216, 78, 218, 79, 220, 80, 222, 0, 224, 0, 226, 0, 228, 0, 230, 0, 232, 81, 234, 82, 236, 83, 238, 84, 240, 0, 242, 0, 244, 0, 246, 0, 248, 85, 250, 0, 252, 86, 254, 87, 256, 88, 258, 0, 260, 0, 262, 89, 264, 90, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 0, 284, 0, 286, 0, 288, 0, 290, 0, 292, 95, 294, 96, 296, 97, 298, 0, 300, 0, 302, 0, 304, 0, 306, 98, 308, 99, 310, 100, 312, 0, 314, 101, 316, 102, 318, 103, 320, 104, 322, 0, 324, 105, 326, 106, 328, 107, 330, 108, 332, 0, 334, 109, 336, 110, 338, 111, 340, 
112, 342, 113, 344, 0, 346, 0, 348, 0, 350, 0, 352, 0, 354, 0, 356, 0, 358, 114, 360, 115, 362, 116, 364, 0, 366, 0, 368, 0, 370, 0, 372, 117, 374, 118, 376, 119, 378, 0, 380, 0, 382, 0, 384, 120, 386, 121, 388, 122, 390, 0, 392, 0, 394, 123, 396, 124, 398, 125, 400, 0, 402, 0, 404, 0, 406, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1501, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 1, 66, 1, 0, 0, 0, 1, 88, 1, 0, 0, 0, 1, 90, 1, 0, 0, 0, 1, 92, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 
0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 2, 184, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 2, 188, 1, 0, 0, 0, 2, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 4, 226, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 5, 248, 1, 0, 0, 0, 5, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 6, 270, 1, 0, 0, 0, 6, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 7, 290, 1, 0, 0, 0, 7, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 8, 304, 1, 0, 0, 0, 8, 306, 1, 0, 0, 
0, 8, 308, 1, 0, 0, 0, 8, 310, 1, 0, 0, 0, 9, 312, 1, 0, 0, 0, 9, 314, 1, 0, 0, 0, 9, 316, 1, 0, 0, 0, 9, 318, 1, 0, 0, 0, 9, 320, 1, 0, 0, 0, 10, 322, 1, 0, 0, 0, 10, 324, 1, 0, 0, 0, 10, 326, 1, 0, 0, 0, 10, 328, 1, 0, 0, 0, 10, 330, 1, 0, 0, 0, 11, 332, 1, 0, 0, 0, 11, 334, 1, 0, 0, 0, 11, 336, 1, 0, 0, 0, 11, 338, 1, 0, 0, 0, 11, 340, 1, 0, 0, 0, 11, 342, 1, 0, 0, 0, 12, 344, 1, 0, 0, 0, 12, 346, 1, 0, 0, 0, 12, 348, 1, 0, 0, 0, 12, 350, 1, 0, 0, 0, 12, 352, 1, 0, 0, 0, 12, 354, 1, 0, 0, 0, 12, 356, 1, 0, 0, 0, 12, 358, 1, 0, 0, 0, 12, 360, 1, 0, 0, 0, 12, 362, 1, 0, 0, 0, 13, 364, 1, 0, 0, 0, 13, 366, 1, 0, 0, 0, 13, 368, 1, 0, 0, 0, 13, 370, 1, 0, 0, 0, 13, 372, 1, 0, 0, 0, 13, 374, 1, 0, 0, 0, 13, 376, 1, 0, 0, 0, 14, 378, 1, 0, 0, 0, 14, 380, 1, 0, 0, 0, 14, 382, 1, 0, 0, 0, 14, 384, 1, 0, 0, 0, 14, 386, 1, 0, 0, 0, 14, 388, 1, 0, 0, 0, 15, 390, 1, 0, 0, 0, 15, 392, 1, 0, 0, 0, 15, 394, 1, 0, 0, 0, 15, 396, 1, 0, 0, 0, 15, 398, 1, 0, 0, 0, 15, 400, 1, 0, 0, 0, 15, 402, 1, 0, 0, 0, 15, 404, 1, 0, 0, 0, 15, 406, 1, 0, 0, 0, 16, 408, 1, 0, 0, 0, 18, 418, 1, 0, 0, 0, 20, 425, 1, 0, 0, 0, 22, 434, 1, 0, 0, 0, 24, 441, 1, 0, 0, 0, 26, 451, 1, 0, 0, 0, 28, 458, 1, 0, 0, 0, 30, 465, 1, 0, 0, 0, 32, 472, 1, 0, 0, 0, 34, 480, 1, 0, 0, 0, 36, 487, 1, 0, 0, 0, 38, 499, 1, 0, 0, 0, 40, 508, 1, 0, 0, 0, 42, 514, 1, 0, 0, 0, 44, 521, 1, 0, 0, 0, 46, 528, 1, 0, 0, 0, 48, 536, 1, 0, 0, 0, 50, 544, 1, 0, 0, 0, 52, 559, 1, 0, 0, 0, 54, 569, 1, 0, 0, 0, 56, 578, 1, 0, 0, 0, 58, 590, 1, 0, 0, 0, 60, 596, 1, 0, 0, 0, 62, 613, 1, 0, 0, 0, 64, 629, 1, 0, 0, 0, 66, 635, 1, 0, 0, 0, 68, 639, 1, 0, 0, 0, 70, 641, 1, 0, 0, 0, 72, 643, 1, 0, 0, 0, 74, 646, 1, 0, 0, 0, 76, 648, 1, 0, 0, 0, 78, 657, 1, 0, 0, 0, 80, 659, 1, 0, 0, 0, 82, 664, 1, 0, 0, 0, 84, 666, 1, 0, 0, 0, 86, 671, 1, 0, 0, 0, 88, 702, 1, 0, 0, 0, 90, 705, 1, 0, 0, 0, 92, 751, 1, 0, 0, 0, 94, 753, 1, 0, 0, 0, 96, 756, 1, 0, 0, 0, 98, 760, 1, 0, 0, 0, 100, 764, 1, 0, 0, 0, 102, 766, 1, 0, 0, 0, 104, 769, 1, 0, 0, 0, 106, 
771, 1, 0, 0, 0, 108, 776, 1, 0, 0, 0, 110, 778, 1, 0, 0, 0, 112, 784, 1, 0, 0, 0, 114, 790, 1, 0, 0, 0, 116, 793, 1, 0, 0, 0, 118, 796, 1, 0, 0, 0, 120, 801, 1, 0, 0, 0, 122, 806, 1, 0, 0, 0, 124, 808, 1, 0, 0, 0, 126, 812, 1, 0, 0, 0, 128, 817, 1, 0, 0, 0, 130, 823, 1, 0, 0, 0, 132, 826, 1, 0, 0, 0, 134, 828, 1, 0, 0, 0, 136, 834, 1, 0, 0, 0, 138, 836, 1, 0, 0, 0, 140, 841, 1, 0, 0, 0, 142, 844, 1, 0, 0, 0, 144, 847, 1, 0, 0, 0, 146, 850, 1, 0, 0, 0, 148, 852, 1, 0, 0, 0, 150, 855, 1, 0, 0, 0, 152, 857, 1, 0, 0, 0, 154, 860, 1, 0, 0, 0, 156, 862, 1, 0, 0, 0, 158, 864, 1, 0, 0, 0, 160, 866, 1, 0, 0, 0, 162, 868, 1, 0, 0, 0, 164, 870, 1, 0, 0, 0, 166, 892, 1, 0, 0, 0, 168, 894, 1, 0, 0, 0, 170, 899, 1, 0, 0, 0, 172, 920, 1, 0, 0, 0, 174, 922, 1, 0, 0, 0, 176, 930, 1, 0, 0, 0, 178, 932, 1, 0, 0, 0, 180, 936, 1, 0, 0, 0, 182, 940, 1, 0, 0, 0, 184, 944, 1, 0, 0, 0, 186, 949, 1, 0, 0, 0, 188, 954, 1, 0, 0, 0, 190, 958, 1, 0, 0, 0, 192, 962, 1, 0, 0, 0, 194, 966, 1, 0, 0, 0, 196, 971, 1, 0, 0, 0, 198, 975, 1, 0, 0, 0, 200, 979, 1, 0, 0, 0, 202, 983, 1, 0, 0, 0, 204, 987, 1, 0, 0, 0, 206, 991, 1, 0, 0, 0, 208, 1003, 1, 0, 0, 0, 210, 1006, 1, 0, 0, 0, 212, 1010, 1, 0, 0, 0, 214, 1014, 1, 0, 0, 0, 216, 1018, 1, 0, 0, 0, 218, 1022, 1, 0, 0, 0, 220, 1026, 1, 0, 0, 0, 222, 1030, 1, 0, 0, 0, 224, 1035, 1, 0, 0, 0, 226, 1039, 1, 0, 0, 0, 228, 1047, 1, 0, 0, 0, 230, 1068, 1, 0, 0, 0, 232, 1072, 1, 0, 0, 0, 234, 1076, 1, 0, 0, 0, 236, 1080, 1, 0, 0, 0, 238, 1084, 1, 0, 0, 0, 240, 1088, 1, 0, 0, 0, 242, 1093, 1, 0, 0, 0, 244, 1097, 1, 0, 0, 0, 246, 1101, 1, 0, 0, 0, 248, 1105, 1, 0, 0, 0, 250, 1108, 1, 0, 0, 0, 252, 1112, 1, 0, 0, 0, 254, 1116, 1, 0, 0, 0, 256, 1120, 1, 0, 0, 0, 258, 1124, 1, 0, 0, 0, 260, 1129, 1, 0, 0, 0, 262, 1134, 1, 0, 0, 0, 264, 1139, 1, 0, 0, 0, 266, 1146, 1, 0, 0, 0, 268, 1155, 1, 0, 0, 0, 270, 1162, 1, 0, 0, 0, 272, 1166, 1, 0, 0, 0, 274, 1170, 1, 0, 0, 0, 276, 1174, 1, 0, 0, 0, 278, 1178, 1, 0, 0, 0, 280, 1184, 1, 0, 0, 0, 282, 1188, 1, 0, 0, 0, 284, 
1192, 1, 0, 0, 0, 286, 1196, 1, 0, 0, 0, 288, 1200, 1, 0, 0, 0, 290, 1204, 1, 0, 0, 0, 292, 1208, 1, 0, 0, 0, 294, 1212, 1, 0, 0, 0, 296, 1216, 1, 0, 0, 0, 298, 1220, 1, 0, 0, 0, 300, 1225, 1, 0, 0, 0, 302, 1229, 1, 0, 0, 0, 304, 1233, 1, 0, 0, 0, 306, 1237, 1, 0, 0, 0, 308, 1241, 1, 0, 0, 0, 310, 1245, 1, 0, 0, 0, 312, 1249, 1, 0, 0, 0, 314, 1254, 1, 0, 0, 0, 316, 1259, 1, 0, 0, 0, 318, 1263, 1, 0, 0, 0, 320, 1267, 1, 0, 0, 0, 322, 1271, 1, 0, 0, 0, 324, 1276, 1, 0, 0, 0, 326, 1286, 1, 0, 0, 0, 328, 1290, 1, 0, 0, 0, 330, 1294, 1, 0, 0, 0, 332, 1298, 1, 0, 0, 0, 334, 1303, 1, 0, 0, 0, 336, 1310, 1, 0, 0, 0, 338, 1314, 1, 0, 0, 0, 340, 1318, 1, 0, 0, 0, 342, 1322, 1, 0, 0, 0, 344, 1326, 1, 0, 0, 0, 346, 1331, 1, 0, 0, 0, 348, 1335, 1, 0, 0, 0, 350, 1339, 1, 0, 0, 0, 352, 1343, 1, 0, 0, 0, 354, 1348, 1, 0, 0, 0, 356, 1352, 1, 0, 0, 0, 358, 1356, 1, 0, 0, 0, 360, 1360, 1, 0, 0, 0, 362, 1364, 1, 0, 0, 0, 364, 1368, 1, 0, 0, 0, 366, 1374, 1, 0, 0, 0, 368, 1378, 1, 0, 0, 0, 370, 1382, 1, 0, 0, 0, 372, 1386, 1, 0, 0, 0, 374, 1390, 1, 0, 0, 0, 376, 1394, 1, 0, 0, 0, 378, 1398, 1, 0, 0, 0, 380, 1403, 1, 0, 0, 0, 382, 1409, 1, 0, 0, 0, 384, 1415, 1, 0, 0, 0, 386, 1419, 1, 0, 0, 0, 388, 1423, 1, 0, 0, 0, 390, 1427, 1, 0, 0, 0, 392, 1433, 1, 0, 0, 0, 394, 1439, 1, 0, 0, 0, 396, 1443, 1, 0, 0, 0, 398, 1447, 1, 0, 0, 0, 400, 1451, 1, 0, 0, 0, 402, 1457, 1, 0, 0, 0, 404, 1463, 1, 0, 0, 0, 406, 1469, 1, 0, 0, 0, 408, 409, 7, 0, 0, 0, 409, 410, 7, 1, 0, 0, 410, 411, 7, 2, 0, 0, 411, 412, 7, 2, 0, 0, 412, 413, 7, 3, 0, 0, 413, 414, 7, 4, 0, 0, 414, 415, 7, 5, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 0, 0, 0, 417, 17, 1, 0, 0, 0, 418, 419, 7, 0, 0, 0, 419, 420, 7, 6, 0, 0, 420, 421, 7, 7, 0, 0, 421, 422, 7, 8, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 1, 1, 0, 424, 19, 1, 0, 0, 0, 425, 426, 7, 3, 0, 0, 426, 427, 7, 9, 0, 0, 427, 428, 7, 6, 0, 0, 428, 429, 7, 1, 0, 0, 429, 430, 7, 4, 0, 0, 430, 431, 7, 10, 0, 0, 431, 432, 1, 0, 0, 0, 432, 433, 6, 2, 2, 0, 433, 21, 1, 0, 0, 0, 434, 
435, 7, 3, 0, 0, 435, 436, 7, 11, 0, 0, 436, 437, 7, 12, 0, 0, 437, 438, 7, 13, 0, 0, 438, 439, 1, 0, 0, 0, 439, 440, 6, 3, 0, 0, 440, 23, 1, 0, 0, 0, 441, 442, 7, 3, 0, 0, 442, 443, 7, 14, 0, 0, 443, 444, 7, 8, 0, 0, 444, 445, 7, 13, 0, 0, 445, 446, 7, 12, 0, 0, 446, 447, 7, 1, 0, 0, 447, 448, 7, 9, 0, 0, 448, 449, 1, 0, 0, 0, 449, 450, 6, 4, 3, 0, 450, 25, 1, 0, 0, 0, 451, 452, 7, 15, 0, 0, 452, 453, 7, 6, 0, 0, 453, 454, 7, 7, 0, 0, 454, 455, 7, 16, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 6, 5, 4, 0, 457, 27, 1, 0, 0, 0, 458, 459, 7, 17, 0, 0, 459, 460, 7, 6, 0, 0, 460, 461, 7, 7, 0, 0, 461, 462, 7, 18, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 6, 0, 0, 464, 29, 1, 0, 0, 0, 465, 466, 7, 18, 0, 0, 466, 467, 7, 3, 0, 0, 467, 468, 7, 3, 0, 0, 468, 469, 7, 8, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 7, 1, 0, 471, 31, 1, 0, 0, 0, 472, 473, 7, 13, 0, 0, 473, 474, 7, 1, 0, 0, 474, 475, 7, 16, 0, 0, 475, 476, 7, 1, 0, 0, 476, 477, 7, 5, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 8, 0, 0, 479, 33, 1, 0, 0, 0, 480, 481, 7, 16, 0, 0, 481, 482, 7, 3, 0, 0, 482, 483, 7, 5, 0, 0, 483, 484, 7, 12, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 9, 5, 0, 486, 35, 1, 0, 0, 0, 487, 488, 7, 16, 0, 0, 488, 489, 7, 11, 0, 0, 489, 490, 5, 95, 0, 0, 490, 491, 7, 3, 0, 0, 491, 492, 7, 14, 0, 0, 492, 493, 7, 8, 0, 0, 493, 494, 7, 12, 0, 0, 494, 495, 7, 9, 0, 0, 495, 496, 7, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 10, 6, 0, 498, 37, 1, 0, 0, 0, 499, 500, 7, 6, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 9, 0, 0, 502, 503, 7, 12, 0, 0, 503, 504, 7, 16, 0, 0, 504, 505, 7, 3, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 11, 7, 0, 507, 39, 1, 0, 0, 0, 508, 509, 7, 6, 0, 0, 509, 510, 7, 7, 0, 0, 510, 511, 7, 19, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 12, 0, 0, 513, 41, 1, 0, 0, 0, 514, 515, 7, 2, 0, 0, 515, 516, 7, 10, 0, 0, 516, 517, 7, 7, 0, 0, 517, 518, 7, 19, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 13, 8, 0, 520, 43, 1, 0, 0, 0, 521, 522, 7, 2, 0, 0, 522, 523, 7, 7, 0, 0, 523, 524, 7, 6, 0, 0, 524, 
525, 7, 5, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 14, 0, 0, 527, 45, 1, 0, 0, 0, 528, 529, 7, 2, 0, 0, 529, 530, 7, 5, 0, 0, 530, 531, 7, 12, 0, 0, 531, 532, 7, 5, 0, 0, 532, 533, 7, 2, 0, 0, 533, 534, 1, 0, 0, 0, 534, 535, 6, 15, 0, 0, 535, 47, 1, 0, 0, 0, 536, 537, 7, 19, 0, 0, 537, 538, 7, 10, 0, 0, 538, 539, 7, 3, 0, 0, 539, 540, 7, 6, 0, 0, 540, 541, 7, 3, 0, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 16, 0, 0, 543, 49, 1, 0, 0, 0, 544, 545, 4, 17, 0, 0, 545, 546, 7, 1, 0, 0, 546, 547, 7, 9, 0, 0, 547, 548, 7, 13, 0, 0, 548, 549, 7, 1, 0, 0, 549, 550, 7, 9, 0, 0, 550, 551, 7, 3, 0, 0, 551, 552, 7, 2, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 12, 0, 0, 554, 555, 7, 5, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 6, 17, 0, 0, 558, 51, 1, 0, 0, 0, 559, 560, 4, 18, 1, 0, 560, 561, 7, 13, 0, 0, 561, 562, 7, 7, 0, 0, 562, 563, 7, 7, 0, 0, 563, 564, 7, 18, 0, 0, 564, 565, 7, 20, 0, 0, 565, 566, 7, 8, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 6, 18, 9, 0, 568, 53, 1, 0, 0, 0, 569, 570, 4, 19, 2, 0, 570, 571, 7, 16, 0, 0, 571, 572, 7, 12, 0, 0, 572, 573, 7, 5, 0, 0, 573, 574, 7, 4, 0, 0, 574, 575, 7, 10, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 6, 19, 0, 0, 577, 55, 1, 0, 0, 0, 578, 579, 4, 20, 3, 0, 579, 580, 7, 16, 0, 0, 580, 581, 7, 3, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 6, 0, 0, 583, 584, 7, 1, 0, 0, 584, 585, 7, 4, 0, 0, 585, 586, 7, 2, 0, 0, 586, 587, 1, 0, 0, 0, 587, 588, 6, 20, 10, 0, 588, 57, 1, 0, 0, 0, 589, 591, 8, 21, 0, 0, 590, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 590, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 21, 0, 0, 595, 59, 1, 0, 0, 0, 596, 597, 5, 47, 0, 0, 597, 598, 5, 47, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 22, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 5, 13, 0, 0, 606, 605, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 609, 1, 0, 0, 0, 608, 610, 5, 10, 0, 0, 609, 608, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 
610, 611, 1, 0, 0, 0, 611, 612, 6, 22, 11, 0, 612, 61, 1, 0, 0, 0, 613, 614, 5, 47, 0, 0, 614, 615, 5, 42, 0, 0, 615, 620, 1, 0, 0, 0, 616, 619, 3, 62, 23, 0, 617, 619, 9, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 617, 1, 0, 0, 0, 619, 622, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 621, 623, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 623, 624, 5, 42, 0, 0, 624, 625, 5, 47, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 6, 23, 11, 0, 627, 63, 1, 0, 0, 0, 628, 630, 7, 23, 0, 0, 629, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 24, 11, 0, 634, 65, 1, 0, 0, 0, 635, 636, 5, 124, 0, 0, 636, 637, 1, 0, 0, 0, 637, 638, 6, 25, 12, 0, 638, 67, 1, 0, 0, 0, 639, 640, 7, 24, 0, 0, 640, 69, 1, 0, 0, 0, 641, 642, 7, 25, 0, 0, 642, 71, 1, 0, 0, 0, 643, 644, 5, 92, 0, 0, 644, 645, 7, 26, 0, 0, 645, 73, 1, 0, 0, 0, 646, 647, 8, 27, 0, 0, 647, 75, 1, 0, 0, 0, 648, 650, 7, 3, 0, 0, 649, 651, 7, 28, 0, 0, 650, 649, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 653, 1, 0, 0, 0, 652, 654, 3, 68, 26, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 77, 1, 0, 0, 0, 657, 658, 5, 64, 0, 0, 658, 79, 1, 0, 0, 0, 659, 660, 5, 96, 0, 0, 660, 81, 1, 0, 0, 0, 661, 665, 8, 29, 0, 0, 662, 663, 5, 96, 0, 0, 663, 665, 5, 96, 0, 0, 664, 661, 1, 0, 0, 0, 664, 662, 1, 0, 0, 0, 665, 83, 1, 0, 0, 0, 666, 667, 5, 95, 0, 0, 667, 85, 1, 0, 0, 0, 668, 672, 3, 70, 27, 0, 669, 672, 3, 68, 26, 0, 670, 672, 3, 84, 34, 0, 671, 668, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 671, 670, 1, 0, 0, 0, 672, 87, 1, 0, 0, 0, 673, 678, 5, 34, 0, 0, 674, 677, 3, 72, 28, 0, 675, 677, 3, 74, 29, 0, 676, 674, 1, 0, 0, 0, 676, 675, 1, 0, 0, 0, 677, 680, 1, 0, 0, 0, 678, 676, 1, 0, 0, 0, 678, 679, 1, 0, 0, 0, 679, 681, 1, 0, 0, 0, 680, 678, 1, 0, 0, 0, 681, 703, 5, 34, 0, 0, 682, 683, 5, 34, 0, 0, 683, 684, 5, 34, 0, 0, 684, 685, 5, 34, 0, 0, 685, 689, 1, 0, 0, 0, 686, 688, 8, 22, 0, 0, 687, 686, 1, 0, 0, 0, 688, 691, 1, 0, 0, 0, 689, 
690, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 690, 692, 1, 0, 0, 0, 691, 689, 1, 0, 0, 0, 692, 693, 5, 34, 0, 0, 693, 694, 5, 34, 0, 0, 694, 695, 5, 34, 0, 0, 695, 697, 1, 0, 0, 0, 696, 698, 5, 34, 0, 0, 697, 696, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 700, 1, 0, 0, 0, 699, 701, 5, 34, 0, 0, 700, 699, 1, 0, 0, 0, 700, 701, 1, 0, 0, 0, 701, 703, 1, 0, 0, 0, 702, 673, 1, 0, 0, 0, 702, 682, 1, 0, 0, 0, 703, 89, 1, 0, 0, 0, 704, 706, 3, 68, 26, 0, 705, 704, 1, 0, 0, 0, 706, 707, 1, 0, 0, 0, 707, 705, 1, 0, 0, 0, 707, 708, 1, 0, 0, 0, 708, 91, 1, 0, 0, 0, 709, 711, 3, 68, 26, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 718, 3, 108, 46, 0, 715, 717, 3, 68, 26, 0, 716, 715, 1, 0, 0, 0, 717, 720, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 752, 1, 0, 0, 0, 720, 718, 1, 0, 0, 0, 721, 723, 3, 108, 46, 0, 722, 724, 3, 68, 26, 0, 723, 722, 1, 0, 0, 0, 724, 725, 1, 0, 0, 0, 725, 723, 1, 0, 0, 0, 725, 726, 1, 0, 0, 0, 726, 752, 1, 0, 0, 0, 727, 729, 3, 68, 26, 0, 728, 727, 1, 0, 0, 0, 729, 730, 1, 0, 0, 0, 730, 728, 1, 0, 0, 0, 730, 731, 1, 0, 0, 0, 731, 739, 1, 0, 0, 0, 732, 736, 3, 108, 46, 0, 733, 735, 3, 68, 26, 0, 734, 733, 1, 0, 0, 0, 735, 738, 1, 0, 0, 0, 736, 734, 1, 0, 0, 0, 736, 737, 1, 0, 0, 0, 737, 740, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 739, 732, 1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 741, 1, 0, 0, 0, 741, 742, 3, 76, 30, 0, 742, 752, 1, 0, 0, 0, 743, 745, 3, 108, 46, 0, 744, 746, 3, 68, 26, 0, 745, 744, 1, 0, 0, 0, 746, 747, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 1, 0, 0, 0, 749, 750, 3, 76, 30, 0, 750, 752, 1, 0, 0, 0, 751, 710, 1, 0, 0, 0, 751, 721, 1, 0, 0, 0, 751, 728, 1, 0, 0, 0, 751, 743, 1, 0, 0, 0, 752, 93, 1, 0, 0, 0, 753, 754, 7, 30, 0, 0, 754, 755, 7, 31, 0, 0, 755, 95, 1, 0, 0, 0, 756, 757, 7, 12, 0, 0, 757, 758, 7, 9, 0, 0, 758, 759, 7, 0, 0, 0, 759, 97, 1, 0, 0, 0, 760, 761, 7, 12, 0, 0, 761, 762, 7, 2, 0, 0, 762, 763, 7, 4, 0, 0, 763, 99, 1, 
0, 0, 0, 764, 765, 5, 61, 0, 0, 765, 101, 1, 0, 0, 0, 766, 767, 5, 58, 0, 0, 767, 768, 5, 58, 0, 0, 768, 103, 1, 0, 0, 0, 769, 770, 5, 44, 0, 0, 770, 105, 1, 0, 0, 0, 771, 772, 7, 0, 0, 0, 772, 773, 7, 3, 0, 0, 773, 774, 7, 2, 0, 0, 774, 775, 7, 4, 0, 0, 775, 107, 1, 0, 0, 0, 776, 777, 5, 46, 0, 0, 777, 109, 1, 0, 0, 0, 778, 779, 7, 15, 0, 0, 779, 780, 7, 12, 0, 0, 780, 781, 7, 13, 0, 0, 781, 782, 7, 2, 0, 0, 782, 783, 7, 3, 0, 0, 783, 111, 1, 0, 0, 0, 784, 785, 7, 15, 0, 0, 785, 786, 7, 1, 0, 0, 786, 787, 7, 6, 0, 0, 787, 788, 7, 2, 0, 0, 788, 789, 7, 5, 0, 0, 789, 113, 1, 0, 0, 0, 790, 791, 7, 1, 0, 0, 791, 792, 7, 9, 0, 0, 792, 115, 1, 0, 0, 0, 793, 794, 7, 1, 0, 0, 794, 795, 7, 2, 0, 0, 795, 117, 1, 0, 0, 0, 796, 797, 7, 13, 0, 0, 797, 798, 7, 12, 0, 0, 798, 799, 7, 2, 0, 0, 799, 800, 7, 5, 0, 0, 800, 119, 1, 0, 0, 0, 801, 802, 7, 13, 0, 0, 802, 803, 7, 1, 0, 0, 803, 804, 7, 18, 0, 0, 804, 805, 7, 3, 0, 0, 805, 121, 1, 0, 0, 0, 806, 807, 5, 40, 0, 0, 807, 123, 1, 0, 0, 0, 808, 809, 7, 9, 0, 0, 809, 810, 7, 7, 0, 0, 810, 811, 7, 5, 0, 0, 811, 125, 1, 0, 0, 0, 812, 813, 7, 9, 0, 0, 813, 814, 7, 20, 0, 0, 814, 815, 7, 13, 0, 0, 815, 816, 7, 13, 0, 0, 816, 127, 1, 0, 0, 0, 817, 818, 7, 9, 0, 0, 818, 819, 7, 20, 0, 0, 819, 820, 7, 13, 0, 0, 820, 821, 7, 13, 0, 0, 821, 822, 7, 2, 0, 0, 822, 129, 1, 0, 0, 0, 823, 824, 7, 7, 0, 0, 824, 825, 7, 6, 0, 0, 825, 131, 1, 0, 0, 0, 826, 827, 5, 63, 0, 0, 827, 133, 1, 0, 0, 0, 828, 829, 7, 6, 0, 0, 829, 830, 7, 13, 0, 0, 830, 831, 7, 1, 0, 0, 831, 832, 7, 18, 0, 0, 832, 833, 7, 3, 0, 0, 833, 135, 1, 0, 0, 0, 834, 835, 5, 41, 0, 0, 835, 137, 1, 0, 0, 0, 836, 837, 7, 5, 0, 0, 837, 838, 7, 6, 0, 0, 838, 839, 7, 20, 0, 0, 839, 840, 7, 3, 0, 0, 840, 139, 1, 0, 0, 0, 841, 842, 5, 61, 0, 0, 842, 843, 5, 61, 0, 0, 843, 141, 1, 0, 0, 0, 844, 845, 5, 61, 0, 0, 845, 846, 5, 126, 0, 0, 846, 143, 1, 0, 0, 0, 847, 848, 5, 33, 0, 0, 848, 849, 5, 61, 0, 0, 849, 145, 1, 0, 0, 0, 850, 851, 5, 60, 0, 0, 851, 147, 1, 0, 0, 0, 852, 853, 5, 60, 0, 
0, 853, 854, 5, 61, 0, 0, 854, 149, 1, 0, 0, 0, 855, 856, 5, 62, 0, 0, 856, 151, 1, 0, 0, 0, 857, 858, 5, 62, 0, 0, 858, 859, 5, 61, 0, 0, 859, 153, 1, 0, 0, 0, 860, 861, 5, 43, 0, 0, 861, 155, 1, 0, 0, 0, 862, 863, 5, 45, 0, 0, 863, 157, 1, 0, 0, 0, 864, 865, 5, 42, 0, 0, 865, 159, 1, 0, 0, 0, 866, 867, 5, 47, 0, 0, 867, 161, 1, 0, 0, 0, 868, 869, 5, 37, 0, 0, 869, 163, 1, 0, 0, 0, 870, 871, 4, 74, 4, 0, 871, 872, 3, 54, 19, 0, 872, 873, 1, 0, 0, 0, 873, 874, 6, 74, 13, 0, 874, 165, 1, 0, 0, 0, 875, 878, 3, 132, 58, 0, 876, 879, 3, 70, 27, 0, 877, 879, 3, 84, 34, 0, 878, 876, 1, 0, 0, 0, 878, 877, 1, 0, 0, 0, 879, 883, 1, 0, 0, 0, 880, 882, 3, 86, 35, 0, 881, 880, 1, 0, 0, 0, 882, 885, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 893, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 886, 888, 3, 132, 58, 0, 887, 889, 3, 68, 26, 0, 888, 887, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 893, 1, 0, 0, 0, 892, 875, 1, 0, 0, 0, 892, 886, 1, 0, 0, 0, 893, 167, 1, 0, 0, 0, 894, 895, 5, 91, 0, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 76, 0, 0, 897, 898, 6, 76, 0, 0, 898, 169, 1, 0, 0, 0, 899, 900, 5, 93, 0, 0, 900, 901, 1, 0, 0, 0, 901, 902, 6, 77, 12, 0, 902, 903, 6, 77, 12, 0, 903, 171, 1, 0, 0, 0, 904, 908, 3, 70, 27, 0, 905, 907, 3, 86, 35, 0, 906, 905, 1, 0, 0, 0, 907, 910, 1, 0, 0, 0, 908, 906, 1, 0, 0, 0, 908, 909, 1, 0, 0, 0, 909, 921, 1, 0, 0, 0, 910, 908, 1, 0, 0, 0, 911, 914, 3, 84, 34, 0, 912, 914, 3, 78, 31, 0, 913, 911, 1, 0, 0, 0, 913, 912, 1, 0, 0, 0, 914, 916, 1, 0, 0, 0, 915, 917, 3, 86, 35, 0, 916, 915, 1, 0, 0, 0, 917, 918, 1, 0, 0, 0, 918, 916, 1, 0, 0, 0, 918, 919, 1, 0, 0, 0, 919, 921, 1, 0, 0, 0, 920, 904, 1, 0, 0, 0, 920, 913, 1, 0, 0, 0, 921, 173, 1, 0, 0, 0, 922, 924, 3, 80, 32, 0, 923, 925, 3, 82, 33, 0, 924, 923, 1, 0, 0, 0, 925, 926, 1, 0, 0, 0, 926, 924, 1, 0, 0, 0, 926, 927, 1, 0, 0, 0, 927, 928, 1, 0, 0, 0, 928, 929, 3, 80, 32, 0, 929, 175, 1, 0, 0, 0, 930, 931, 3, 174, 79, 0, 931, 177, 1, 0, 0, 0, 932, 
933, 3, 60, 22, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 81, 11, 0, 935, 179, 1, 0, 0, 0, 936, 937, 3, 62, 23, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 82, 11, 0, 939, 181, 1, 0, 0, 0, 940, 941, 3, 64, 24, 0, 941, 942, 1, 0, 0, 0, 942, 943, 6, 83, 11, 0, 943, 183, 1, 0, 0, 0, 944, 945, 3, 168, 76, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 84, 14, 0, 947, 948, 6, 84, 15, 0, 948, 185, 1, 0, 0, 0, 949, 950, 3, 66, 25, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 85, 16, 0, 952, 953, 6, 85, 12, 0, 953, 187, 1, 0, 0, 0, 954, 955, 3, 64, 24, 0, 955, 956, 1, 0, 0, 0, 956, 957, 6, 86, 11, 0, 957, 189, 1, 0, 0, 0, 958, 959, 3, 60, 22, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 87, 11, 0, 961, 191, 1, 0, 0, 0, 962, 963, 3, 62, 23, 0, 963, 964, 1, 0, 0, 0, 964, 965, 6, 88, 11, 0, 965, 193, 1, 0, 0, 0, 966, 967, 3, 66, 25, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 89, 16, 0, 969, 970, 6, 89, 12, 0, 970, 195, 1, 0, 0, 0, 971, 972, 3, 168, 76, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 90, 14, 0, 974, 197, 1, 0, 0, 0, 975, 976, 3, 170, 77, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 91, 17, 0, 978, 199, 1, 0, 0, 0, 979, 980, 3, 334, 159, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 92, 18, 0, 982, 201, 1, 0, 0, 0, 983, 984, 3, 104, 44, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 93, 19, 0, 986, 203, 1, 0, 0, 0, 987, 988, 3, 100, 42, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 94, 20, 0, 990, 205, 1, 0, 0, 0, 991, 992, 7, 16, 0, 0, 992, 993, 7, 3, 0, 0, 993, 994, 7, 5, 0, 0, 994, 995, 7, 12, 0, 0, 995, 996, 7, 0, 0, 0, 996, 997, 7, 12, 0, 0, 997, 998, 7, 5, 0, 0, 998, 999, 7, 12, 0, 0, 999, 207, 1, 0, 0, 0, 1000, 1004, 8, 32, 0, 0, 1001, 1002, 5, 47, 0, 0, 1002, 1004, 8, 33, 0, 0, 1003, 1000, 1, 0, 0, 0, 1003, 1001, 1, 0, 0, 0, 1004, 209, 1, 0, 0, 0, 1005, 1007, 3, 208, 96, 0, 1006, 1005, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1006, 1, 0, 0, 0, 1008, 1009, 1, 0, 0, 0, 1009, 211, 1, 0, 0, 0, 1010, 1011, 3, 210, 97, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 98, 21, 0, 1013, 213, 1, 0, 0, 0, 1014, 1015, 3, 88, 36, 0, 1015, 1016, 1, 0, 
0, 0, 1016, 1017, 6, 99, 22, 0, 1017, 215, 1, 0, 0, 0, 1018, 1019, 3, 60, 22, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 100, 11, 0, 1021, 217, 1, 0, 0, 0, 1022, 1023, 3, 62, 23, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 101, 11, 0, 1025, 219, 1, 0, 0, 0, 1026, 1027, 3, 64, 24, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 102, 11, 0, 1029, 221, 1, 0, 0, 0, 1030, 1031, 3, 66, 25, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 103, 16, 0, 1033, 1034, 6, 103, 12, 0, 1034, 223, 1, 0, 0, 0, 1035, 1036, 3, 108, 46, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 104, 23, 0, 1038, 225, 1, 0, 0, 0, 1039, 1040, 3, 104, 44, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 105, 19, 0, 1042, 227, 1, 0, 0, 0, 1043, 1048, 3, 70, 27, 0, 1044, 1048, 3, 68, 26, 0, 1045, 1048, 3, 84, 34, 0, 1046, 1048, 3, 158, 71, 0, 1047, 1043, 1, 0, 0, 0, 1047, 1044, 1, 0, 0, 0, 1047, 1045, 1, 0, 0, 0, 1047, 1046, 1, 0, 0, 0, 1048, 229, 1, 0, 0, 0, 1049, 1052, 3, 70, 27, 0, 1050, 1052, 3, 158, 71, 0, 1051, 1049, 1, 0, 0, 0, 1051, 1050, 1, 0, 0, 0, 1052, 1056, 1, 0, 0, 0, 1053, 1055, 3, 228, 106, 0, 1054, 1053, 1, 0, 0, 0, 1055, 1058, 1, 0, 0, 0, 1056, 1054, 1, 0, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1069, 1, 0, 0, 0, 1058, 1056, 1, 0, 0, 0, 1059, 1062, 3, 84, 34, 0, 1060, 1062, 3, 78, 31, 0, 1061, 1059, 1, 0, 0, 0, 1061, 1060, 1, 0, 0, 0, 1062, 1064, 1, 0, 0, 0, 1063, 1065, 3, 228, 106, 0, 1064, 1063, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1064, 1, 0, 0, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1051, 1, 0, 0, 0, 1068, 1061, 1, 0, 0, 0, 1069, 231, 1, 0, 0, 0, 1070, 1073, 3, 230, 107, 0, 1071, 1073, 3, 174, 79, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1074, 1075, 1, 0, 0, 0, 1075, 233, 1, 0, 0, 0, 1076, 1077, 3, 60, 22, 0, 1077, 1078, 1, 0, 0, 0, 1078, 1079, 6, 109, 11, 0, 1079, 235, 1, 0, 0, 0, 1080, 1081, 3, 62, 23, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1083, 6, 110, 11, 0, 1083, 237, 1, 0, 0, 0, 1084, 1085, 3, 64, 24, 0, 1085, 1086, 1, 0, 0, 0, 
1086, 1087, 6, 111, 11, 0, 1087, 239, 1, 0, 0, 0, 1088, 1089, 3, 66, 25, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 112, 16, 0, 1091, 1092, 6, 112, 12, 0, 1092, 241, 1, 0, 0, 0, 1093, 1094, 3, 100, 42, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 113, 20, 0, 1096, 243, 1, 0, 0, 0, 1097, 1098, 3, 104, 44, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 114, 19, 0, 1100, 245, 1, 0, 0, 0, 1101, 1102, 3, 108, 46, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 115, 23, 0, 1104, 247, 1, 0, 0, 0, 1105, 1106, 7, 12, 0, 0, 1106, 1107, 7, 2, 0, 0, 1107, 249, 1, 0, 0, 0, 1108, 1109, 3, 232, 108, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 117, 24, 0, 1111, 251, 1, 0, 0, 0, 1112, 1113, 3, 60, 22, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1115, 6, 118, 11, 0, 1115, 253, 1, 0, 0, 0, 1116, 1117, 3, 62, 23, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 119, 11, 0, 1119, 255, 1, 0, 0, 0, 1120, 1121, 3, 64, 24, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 120, 11, 0, 1123, 257, 1, 0, 0, 0, 1124, 1125, 3, 66, 25, 0, 1125, 1126, 1, 0, 0, 0, 1126, 1127, 6, 121, 16, 0, 1127, 1128, 6, 121, 12, 0, 1128, 259, 1, 0, 0, 0, 1129, 1130, 3, 168, 76, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 122, 14, 0, 1132, 1133, 6, 122, 25, 0, 1133, 261, 1, 0, 0, 0, 1134, 1135, 7, 7, 0, 0, 1135, 1136, 7, 9, 0, 0, 1136, 1137, 1, 0, 0, 0, 1137, 1138, 6, 123, 26, 0, 1138, 263, 1, 0, 0, 0, 1139, 1140, 7, 19, 0, 0, 1140, 1141, 7, 1, 0, 0, 1141, 1142, 7, 5, 0, 0, 1142, 1143, 7, 10, 0, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 124, 26, 0, 1145, 265, 1, 0, 0, 0, 1146, 1147, 8, 34, 0, 0, 1147, 267, 1, 0, 0, 0, 1148, 1150, 3, 266, 125, 0, 1149, 1148, 1, 0, 0, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1149, 1, 0, 0, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 1, 0, 0, 0, 1153, 1154, 3, 334, 159, 0, 1154, 1156, 1, 0, 0, 0, 1155, 1149, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1158, 1, 0, 0, 0, 1157, 1159, 3, 266, 125, 0, 1158, 1157, 1, 0, 0, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1158, 1, 0, 0, 0, 1160, 1161, 1, 0, 0, 0, 1161, 269, 1, 0, 0, 0, 1162, 1163, 3, 268, 126, 0, 
1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 127, 27, 0, 1165, 271, 1, 0, 0, 0, 1166, 1167, 3, 60, 22, 0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 128, 11, 0, 1169, 273, 1, 0, 0, 0, 1170, 1171, 3, 62, 23, 0, 1171, 1172, 1, 0, 0, 0, 1172, 1173, 6, 129, 11, 0, 1173, 275, 1, 0, 0, 0, 1174, 1175, 3, 64, 24, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 130, 11, 0, 1177, 277, 1, 0, 0, 0, 1178, 1179, 3, 66, 25, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 131, 16, 0, 1181, 1182, 6, 131, 12, 0, 1182, 1183, 6, 131, 12, 0, 1183, 279, 1, 0, 0, 0, 1184, 1185, 3, 100, 42, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 132, 20, 0, 1187, 281, 1, 0, 0, 0, 1188, 1189, 3, 104, 44, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 133, 19, 0, 1191, 283, 1, 0, 0, 0, 1192, 1193, 3, 108, 46, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 134, 23, 0, 1195, 285, 1, 0, 0, 0, 1196, 1197, 3, 264, 124, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 135, 28, 0, 1199, 287, 1, 0, 0, 0, 1200, 1201, 3, 232, 108, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 136, 24, 0, 1203, 289, 1, 0, 0, 0, 1204, 1205, 3, 176, 80, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 137, 29, 0, 1207, 291, 1, 0, 0, 0, 1208, 1209, 3, 60, 22, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 138, 11, 0, 1211, 293, 1, 0, 0, 0, 1212, 1213, 3, 62, 23, 0, 1213, 1214, 1, 0, 0, 0, 1214, 1215, 6, 139, 11, 0, 1215, 295, 1, 0, 0, 0, 1216, 1217, 3, 64, 24, 0, 1217, 1218, 1, 0, 0, 0, 1218, 1219, 6, 140, 11, 0, 1219, 297, 1, 0, 0, 0, 1220, 1221, 3, 66, 25, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 141, 16, 0, 1223, 1224, 6, 141, 12, 0, 1224, 299, 1, 0, 0, 0, 1225, 1226, 3, 108, 46, 0, 1226, 1227, 1, 0, 0, 0, 1227, 1228, 6, 142, 23, 0, 1228, 301, 1, 0, 0, 0, 1229, 1230, 3, 176, 80, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 143, 29, 0, 1232, 303, 1, 0, 0, 0, 1233, 1234, 3, 172, 78, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 144, 30, 0, 1236, 305, 1, 0, 0, 0, 1237, 1238, 3, 60, 22, 0, 1238, 1239, 1, 0, 0, 0, 1239, 1240, 6, 145, 11, 0, 1240, 307, 1, 0, 0, 0, 1241, 1242, 3, 62, 23, 0, 1242, 
1243, 1, 0, 0, 0, 1243, 1244, 6, 146, 11, 0, 1244, 309, 1, 0, 0, 0, 1245, 1246, 3, 64, 24, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 6, 147, 11, 0, 1248, 311, 1, 0, 0, 0, 1249, 1250, 3, 66, 25, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1252, 6, 148, 16, 0, 1252, 1253, 6, 148, 12, 0, 1253, 313, 1, 0, 0, 0, 1254, 1255, 7, 1, 0, 0, 1255, 1256, 7, 9, 0, 0, 1256, 1257, 7, 15, 0, 0, 1257, 1258, 7, 7, 0, 0, 1258, 315, 1, 0, 0, 0, 1259, 1260, 3, 60, 22, 0, 1260, 1261, 1, 0, 0, 0, 1261, 1262, 6, 150, 11, 0, 1262, 317, 1, 0, 0, 0, 1263, 1264, 3, 62, 23, 0, 1264, 1265, 1, 0, 0, 0, 1265, 1266, 6, 151, 11, 0, 1266, 319, 1, 0, 0, 0, 1267, 1268, 3, 64, 24, 0, 1268, 1269, 1, 0, 0, 0, 1269, 1270, 6, 152, 11, 0, 1270, 321, 1, 0, 0, 0, 1271, 1272, 3, 66, 25, 0, 1272, 1273, 1, 0, 0, 0, 1273, 1274, 6, 153, 16, 0, 1274, 1275, 6, 153, 12, 0, 1275, 323, 1, 0, 0, 0, 1276, 1277, 7, 15, 0, 0, 1277, 1278, 7, 20, 0, 0, 1278, 1279, 7, 9, 0, 0, 1279, 1280, 7, 4, 0, 0, 1280, 1281, 7, 5, 0, 0, 1281, 1282, 7, 1, 0, 0, 1282, 1283, 7, 7, 0, 0, 1283, 1284, 7, 9, 0, 0, 1284, 1285, 7, 2, 0, 0, 1285, 325, 1, 0, 0, 0, 1286, 1287, 3, 60, 22, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 155, 11, 0, 1289, 327, 1, 0, 0, 0, 1290, 1291, 3, 62, 23, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 156, 11, 0, 1293, 329, 1, 0, 0, 0, 1294, 1295, 3, 64, 24, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 157, 11, 0, 1297, 331, 1, 0, 0, 0, 1298, 1299, 3, 170, 77, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 158, 17, 0, 1301, 1302, 6, 158, 12, 0, 1302, 333, 1, 0, 0, 0, 1303, 1304, 5, 58, 0, 0, 1304, 335, 1, 0, 0, 0, 1305, 1311, 3, 78, 31, 0, 1306, 1311, 3, 68, 26, 0, 1307, 1311, 3, 108, 46, 0, 1308, 1311, 3, 70, 27, 0, 1309, 1311, 3, 84, 34, 0, 1310, 1305, 1, 0, 0, 0, 1310, 1306, 1, 0, 0, 0, 1310, 1307, 1, 0, 0, 0, 1310, 1308, 1, 0, 0, 0, 1310, 1309, 1, 0, 0, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1310, 1, 0, 0, 0, 1312, 1313, 1, 0, 0, 0, 1313, 337, 1, 0, 0, 0, 1314, 1315, 3, 60, 22, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 161, 11, 0, 1317, 339, 1, 0, 
0, 0, 1318, 1319, 3, 62, 23, 0, 1319, 1320, 1, 0, 0, 0, 1320, 1321, 6, 162, 11, 0, 1321, 341, 1, 0, 0, 0, 1322, 1323, 3, 64, 24, 0, 1323, 1324, 1, 0, 0, 0, 1324, 1325, 6, 163, 11, 0, 1325, 343, 1, 0, 0, 0, 1326, 1327, 3, 66, 25, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 164, 16, 0, 1329, 1330, 6, 164, 12, 0, 1330, 345, 1, 0, 0, 0, 1331, 1332, 3, 334, 159, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 165, 18, 0, 1334, 347, 1, 0, 0, 0, 1335, 1336, 3, 104, 44, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 166, 19, 0, 1338, 349, 1, 0, 0, 0, 1339, 1340, 3, 108, 46, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 167, 23, 0, 1342, 351, 1, 0, 0, 0, 1343, 1344, 3, 262, 123, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 168, 31, 0, 1346, 1347, 6, 168, 32, 0, 1347, 353, 1, 0, 0, 0, 1348, 1349, 3, 210, 97, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 169, 21, 0, 1351, 355, 1, 0, 0, 0, 1352, 1353, 3, 88, 36, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 170, 22, 0, 1355, 357, 1, 0, 0, 0, 1356, 1357, 3, 60, 22, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 171, 11, 0, 1359, 359, 1, 0, 0, 0, 1360, 1361, 3, 62, 23, 0, 1361, 1362, 1, 0, 0, 0, 1362, 1363, 6, 172, 11, 0, 1363, 361, 1, 0, 0, 0, 1364, 1365, 3, 64, 24, 0, 1365, 1366, 1, 0, 0, 0, 1366, 1367, 6, 173, 11, 0, 1367, 363, 1, 0, 0, 0, 1368, 1369, 3, 66, 25, 0, 1369, 1370, 1, 0, 0, 0, 1370, 1371, 6, 174, 16, 0, 1371, 1372, 6, 174, 12, 0, 1372, 1373, 6, 174, 12, 0, 1373, 365, 1, 0, 0, 0, 1374, 1375, 3, 104, 44, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 175, 19, 0, 1377, 367, 1, 0, 0, 0, 1378, 1379, 3, 108, 46, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 176, 23, 0, 1381, 369, 1, 0, 0, 0, 1382, 1383, 3, 232, 108, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 177, 24, 0, 1385, 371, 1, 0, 0, 0, 1386, 1387, 3, 60, 22, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1389, 6, 178, 11, 0, 1389, 373, 1, 0, 0, 0, 1390, 1391, 3, 62, 23, 0, 1391, 1392, 1, 0, 0, 0, 1392, 1393, 6, 179, 11, 0, 1393, 375, 1, 0, 0, 0, 1394, 1395, 3, 64, 24, 0, 1395, 1396, 1, 0, 0, 0, 1396, 1397, 6, 180, 11, 
0, 1397, 377, 1, 0, 0, 0, 1398, 1399, 3, 66, 25, 0, 1399, 1400, 1, 0, 0, 0, 1400, 1401, 6, 181, 16, 0, 1401, 1402, 6, 181, 12, 0, 1402, 379, 1, 0, 0, 0, 1403, 1404, 3, 210, 97, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 182, 21, 0, 1406, 1407, 6, 182, 12, 0, 1407, 1408, 6, 182, 33, 0, 1408, 381, 1, 0, 0, 0, 1409, 1410, 3, 88, 36, 0, 1410, 1411, 1, 0, 0, 0, 1411, 1412, 6, 183, 22, 0, 1412, 1413, 6, 183, 12, 0, 1413, 1414, 6, 183, 33, 0, 1414, 383, 1, 0, 0, 0, 1415, 1416, 3, 60, 22, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 184, 11, 0, 1418, 385, 1, 0, 0, 0, 1419, 1420, 3, 62, 23, 0, 1420, 1421, 1, 0, 0, 0, 1421, 1422, 6, 185, 11, 0, 1422, 387, 1, 0, 0, 0, 1423, 1424, 3, 64, 24, 0, 1424, 1425, 1, 0, 0, 0, 1425, 1426, 6, 186, 11, 0, 1426, 389, 1, 0, 0, 0, 1427, 1428, 3, 334, 159, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 187, 18, 0, 1430, 1431, 6, 187, 12, 0, 1431, 1432, 6, 187, 10, 0, 1432, 391, 1, 0, 0, 0, 1433, 1434, 3, 104, 44, 0, 1434, 1435, 1, 0, 0, 0, 1435, 1436, 6, 188, 19, 0, 1436, 1437, 6, 188, 12, 0, 1437, 1438, 6, 188, 10, 0, 1438, 393, 1, 0, 0, 0, 1439, 1440, 3, 60, 22, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 189, 11, 0, 1442, 395, 1, 0, 0, 0, 1443, 1444, 3, 62, 23, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 190, 11, 0, 1446, 397, 1, 0, 0, 0, 1447, 1448, 3, 64, 24, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 191, 11, 0, 1450, 399, 1, 0, 0, 0, 1451, 1452, 3, 176, 80, 0, 1452, 1453, 1, 0, 0, 0, 1453, 1454, 6, 192, 12, 0, 1454, 1455, 6, 192, 0, 0, 1455, 1456, 6, 192, 29, 0, 1456, 401, 1, 0, 0, 0, 1457, 1458, 3, 172, 78, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 193, 12, 0, 1460, 1461, 6, 193, 0, 0, 1461, 1462, 6, 193, 30, 0, 1462, 403, 1, 0, 0, 0, 1463, 1464, 3, 94, 39, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 194, 12, 0, 1466, 1467, 6, 194, 0, 0, 1467, 1468, 6, 194, 34, 0, 1468, 405, 1, 0, 0, 0, 1469, 1470, 3, 66, 25, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 195, 16, 0, 1472, 1473, 6, 195, 12, 0, 1473, 407, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 
11, 12, 13, 14, 15, 592, 602, 606, 609, 618, 620, 631, 650, 655, 664, 671, 676, 678, 689, 697, 700, 702, 707, 712, 718, 725, 730, 736, 739, 747, 751, 878, 883, 890, 892, 908, 913, 918, 920, 926, 1003, 1008, 1047, 1051, 1056, 1061, 1066, 1068, 1072, 1074, 1151, 1155, 1160, 1310, 1312, 35, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 12, 0, 5, 14, 0, 0, 1, 0, 4, 0, 0, 7, 20, 0, 7, 66, 0, 5, 0, 0, 7, 26, 0, 7, 67, 0, 7, 109, 0, 7, 35, 0, 7, 33, 0, 7, 77, 0, 7, 27, 0, 7, 37, 0, 7, 81, 0, 5, 11, 0, 5, 7, 0, 7, 91, 0, 7, 90, 0, 7, 69, 0, 7, 68, 0, 7, 89, 0, 5, 13, 0, 5, 15, 0, 7, 30, 0] \ No newline at end of file +[4, 0, 120, 1427, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 
7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 
8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 571, 8, 20, 11, 20, 12, 20, 572, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 581, 8, 21, 10, 21, 12, 21, 584, 9, 21, 1, 21, 3, 21, 587, 8, 21, 1, 21, 3, 21, 590, 8, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 599, 8, 22, 10, 22, 12, 22, 602, 9, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 4, 23, 610, 8, 23, 11, 23, 12, 23, 611, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 3, 29, 631, 8, 29, 1, 29, 4, 29, 634, 8, 29, 11, 29, 12, 29, 635, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 3, 32, 645, 8, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 652, 8, 34, 1, 35, 1, 35, 1, 35, 5, 35, 657, 8, 35, 10, 35, 12, 35, 660, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 668, 8, 35, 10, 35, 12, 35, 671, 9, 35, 1, 35, 1, 35, 1, 35, 1, 35, 1, 35, 3, 35, 678, 8, 35, 1, 35, 3, 35, 681, 8, 35, 3, 35, 683, 8, 35, 1, 36, 4, 36, 686, 8, 36, 11, 36, 12, 36, 687, 1, 37, 4, 37, 691, 8, 37, 11, 37, 12, 37, 692, 1, 37, 1, 37, 5, 37, 697, 8, 37, 10, 37, 12, 37, 700, 9, 37, 1, 37, 1, 37, 4, 37, 704, 8, 37, 11, 37, 12, 37, 705, 1, 37, 4, 37, 709, 8, 37, 11, 37, 12, 37, 710, 1, 37, 1, 37, 5, 37, 715, 8, 37, 10, 37, 12, 37, 718, 9, 37, 3, 37, 720, 8, 37, 
1, 37, 1, 37, 1, 37, 1, 37, 4, 37, 726, 8, 37, 11, 37, 12, 37, 727, 1, 37, 1, 37, 3, 37, 732, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 3, 74, 859, 8, 74, 1, 74, 5, 74, 862, 8, 74, 10, 74, 12, 74, 865, 9, 74, 1, 74, 1, 74, 4, 74, 869, 8, 74, 11, 74, 12, 74, 870, 3, 74, 873, 8, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 5, 77, 887, 8, 77, 10, 77, 12, 77, 890, 9, 77, 1, 77, 1, 77, 3, 77, 894, 8, 77, 1, 77, 4, 77, 897, 8, 77, 11, 77, 12, 77, 898, 3, 77, 901, 8, 77, 1, 78, 1, 78, 4, 78, 905, 8, 78, 11, 78, 12, 78, 906, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 3, 95, 984, 8, 95, 1, 96, 4, 96, 987, 8, 96, 11, 96, 12, 96, 988, 1, 
97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 3, 105, 1028, 8, 105, 1, 106, 1, 106, 3, 106, 1032, 8, 106, 1, 106, 5, 106, 1035, 8, 106, 10, 106, 12, 106, 1038, 9, 106, 1, 106, 1, 106, 3, 106, 1042, 8, 106, 1, 106, 4, 106, 1045, 8, 106, 11, 106, 12, 106, 1046, 3, 106, 1049, 8, 106, 1, 107, 1, 107, 4, 107, 1053, 8, 107, 11, 107, 12, 107, 1054, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 125, 4, 125, 1130, 8, 125, 11, 125, 12, 125, 1131, 1, 125, 1, 125, 3, 125, 1136, 8, 125, 1, 125, 4, 125, 1139, 8, 125, 11, 125, 12, 125, 1140, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 
147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 4, 154, 1264, 8, 154, 11, 154, 12, 154, 1265, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 2, 600, 669, 0, 190, 15, 1, 17, 2, 19, 3, 21, 4, 23, 5, 25, 6, 27, 7, 29, 8, 31, 9, 33, 10, 35, 11, 37, 12, 39, 13, 41, 14, 43, 15, 45, 16, 47, 17, 49, 18, 51, 19, 53, 20, 55, 21, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 
40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 0, 163, 64, 165, 65, 167, 66, 169, 67, 171, 0, 173, 68, 175, 69, 177, 70, 179, 71, 181, 0, 183, 0, 185, 72, 187, 73, 189, 74, 191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 75, 205, 0, 207, 76, 209, 0, 211, 0, 213, 77, 215, 78, 217, 79, 219, 0, 221, 0, 223, 0, 225, 0, 227, 0, 229, 80, 231, 81, 233, 82, 235, 83, 237, 0, 239, 0, 241, 0, 243, 0, 245, 84, 247, 0, 249, 85, 251, 86, 253, 87, 255, 0, 257, 0, 259, 88, 261, 89, 263, 0, 265, 90, 267, 0, 269, 91, 271, 92, 273, 93, 275, 0, 277, 0, 279, 0, 281, 0, 283, 0, 285, 0, 287, 0, 289, 94, 291, 95, 293, 96, 295, 0, 297, 0, 299, 0, 301, 0, 303, 97, 305, 98, 307, 99, 309, 0, 311, 100, 313, 101, 315, 102, 317, 103, 319, 0, 321, 104, 323, 105, 325, 106, 327, 107, 329, 108, 331, 0, 333, 0, 335, 0, 337, 0, 339, 0, 341, 0, 343, 0, 345, 109, 347, 110, 349, 111, 351, 0, 353, 0, 355, 0, 357, 0, 359, 112, 361, 113, 363, 114, 365, 0, 367, 0, 369, 0, 371, 115, 373, 116, 375, 117, 377, 0, 379, 0, 381, 118, 383, 119, 385, 120, 387, 0, 389, 0, 391, 0, 393, 0, 15, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 35, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 
4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1455, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 1, 63, 1, 0, 0, 0, 1, 85, 1, 0, 0, 0, 1, 87, 1, 0, 0, 0, 1, 89, 1, 0, 0, 0, 1, 91, 1, 0, 0, 0, 1, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 1, 97, 1, 0, 0, 0, 1, 99, 1, 0, 0, 0, 1, 101, 1, 0, 0, 0, 1, 103, 1, 0, 0, 0, 1, 105, 1, 0, 0, 0, 1, 107, 1, 0, 0, 0, 1, 109, 1, 0, 0, 0, 1, 111, 1, 0, 0, 0, 1, 113, 1, 0, 0, 0, 1, 115, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 1, 119, 1, 0, 0, 0, 1, 121, 1, 0, 0, 0, 1, 123, 1, 0, 0, 0, 1, 125, 1, 0, 0, 0, 1, 127, 1, 0, 0, 0, 1, 129, 1, 0, 0, 0, 1, 131, 1, 0, 0, 0, 1, 133, 1, 0, 0, 0, 1, 135, 1, 0, 0, 0, 1, 137, 1, 0, 0, 0, 1, 139, 1, 0, 0, 0, 1, 141, 1, 0, 0, 0, 1, 143, 1, 0, 0, 0, 1, 145, 1, 0, 0, 0, 1, 147, 1, 0, 0, 0, 1, 149, 1, 0, 0, 0, 1, 151, 1, 0, 0, 0, 1, 153, 1, 0, 0, 0, 1, 155, 1, 0, 0, 0, 1, 157, 1, 0, 0, 0, 1, 159, 1, 0, 0, 0, 1, 161, 1, 0, 0, 0, 1, 163, 1, 0, 0, 0, 1, 165, 1, 0, 0, 0, 1, 167, 1, 0, 0, 0, 1, 169, 1, 0, 0, 0, 1, 173, 1, 0, 0, 0, 1, 175, 1, 0, 0, 0, 1, 177, 1, 0, 0, 0, 1, 179, 1, 0, 0, 0, 2, 181, 1, 0, 0, 0, 2, 183, 1, 0, 0, 0, 2, 185, 1, 0, 0, 0, 2, 187, 1, 0, 0, 0, 2, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 3, 197, 1, 0, 0, 0, 3, 199, 1, 0, 0, 0, 3, 201, 1, 0, 0, 0, 3, 203, 1, 0, 0, 0, 3, 207, 1, 
0, 0, 0, 3, 209, 1, 0, 0, 0, 3, 211, 1, 0, 0, 0, 3, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 3, 217, 1, 0, 0, 0, 4, 219, 1, 0, 0, 0, 4, 221, 1, 0, 0, 0, 4, 223, 1, 0, 0, 0, 4, 229, 1, 0, 0, 0, 4, 231, 1, 0, 0, 0, 4, 233, 1, 0, 0, 0, 4, 235, 1, 0, 0, 0, 5, 237, 1, 0, 0, 0, 5, 239, 1, 0, 0, 0, 5, 241, 1, 0, 0, 0, 5, 243, 1, 0, 0, 0, 5, 245, 1, 0, 0, 0, 5, 247, 1, 0, 0, 0, 5, 249, 1, 0, 0, 0, 5, 251, 1, 0, 0, 0, 5, 253, 1, 0, 0, 0, 6, 255, 1, 0, 0, 0, 6, 257, 1, 0, 0, 0, 6, 259, 1, 0, 0, 0, 6, 261, 1, 0, 0, 0, 6, 265, 1, 0, 0, 0, 6, 267, 1, 0, 0, 0, 6, 269, 1, 0, 0, 0, 6, 271, 1, 0, 0, 0, 6, 273, 1, 0, 0, 0, 7, 275, 1, 0, 0, 0, 7, 277, 1, 0, 0, 0, 7, 279, 1, 0, 0, 0, 7, 281, 1, 0, 0, 0, 7, 283, 1, 0, 0, 0, 7, 285, 1, 0, 0, 0, 7, 287, 1, 0, 0, 0, 7, 289, 1, 0, 0, 0, 7, 291, 1, 0, 0, 0, 7, 293, 1, 0, 0, 0, 8, 295, 1, 0, 0, 0, 8, 297, 1, 0, 0, 0, 8, 299, 1, 0, 0, 0, 8, 301, 1, 0, 0, 0, 8, 303, 1, 0, 0, 0, 8, 305, 1, 0, 0, 0, 8, 307, 1, 0, 0, 0, 9, 309, 1, 0, 0, 0, 9, 311, 1, 0, 0, 0, 9, 313, 1, 0, 0, 0, 9, 315, 1, 0, 0, 0, 9, 317, 1, 0, 0, 0, 10, 319, 1, 0, 0, 0, 10, 321, 1, 0, 0, 0, 10, 323, 1, 0, 0, 0, 10, 325, 1, 0, 0, 0, 10, 327, 1, 0, 0, 0, 10, 329, 1, 0, 0, 0, 11, 331, 1, 0, 0, 0, 11, 333, 1, 0, 0, 0, 11, 335, 1, 0, 0, 0, 11, 337, 1, 0, 0, 0, 11, 339, 1, 0, 0, 0, 11, 341, 1, 0, 0, 0, 11, 343, 1, 0, 0, 0, 11, 345, 1, 0, 0, 0, 11, 347, 1, 0, 0, 0, 11, 349, 1, 0, 0, 0, 12, 351, 1, 0, 0, 0, 12, 353, 1, 0, 0, 0, 12, 355, 1, 0, 0, 0, 12, 357, 1, 0, 0, 0, 12, 359, 1, 0, 0, 0, 12, 361, 1, 0, 0, 0, 12, 363, 1, 0, 0, 0, 13, 365, 1, 0, 0, 0, 13, 367, 1, 0, 0, 0, 13, 369, 1, 0, 0, 0, 13, 371, 1, 0, 0, 0, 13, 373, 1, 0, 0, 0, 13, 375, 1, 0, 0, 0, 14, 377, 1, 0, 0, 0, 14, 379, 1, 0, 0, 0, 14, 381, 1, 0, 0, 0, 14, 383, 1, 0, 0, 0, 14, 385, 1, 0, 0, 0, 14, 387, 1, 0, 0, 0, 14, 389, 1, 0, 0, 0, 14, 391, 1, 0, 0, 0, 14, 393, 1, 0, 0, 0, 15, 395, 1, 0, 0, 0, 17, 405, 1, 0, 0, 0, 19, 412, 1, 0, 0, 0, 21, 421, 1, 0, 0, 0, 23, 428, 1, 0, 0, 0, 25, 438, 1, 0, 0, 0, 27, 445, 1, 0, 0, 0, 29, 
452, 1, 0, 0, 0, 31, 459, 1, 0, 0, 0, 33, 467, 1, 0, 0, 0, 35, 479, 1, 0, 0, 0, 37, 488, 1, 0, 0, 0, 39, 494, 1, 0, 0, 0, 41, 501, 1, 0, 0, 0, 43, 508, 1, 0, 0, 0, 45, 516, 1, 0, 0, 0, 47, 524, 1, 0, 0, 0, 49, 539, 1, 0, 0, 0, 51, 549, 1, 0, 0, 0, 53, 558, 1, 0, 0, 0, 55, 570, 1, 0, 0, 0, 57, 576, 1, 0, 0, 0, 59, 593, 1, 0, 0, 0, 61, 609, 1, 0, 0, 0, 63, 615, 1, 0, 0, 0, 65, 619, 1, 0, 0, 0, 67, 621, 1, 0, 0, 0, 69, 623, 1, 0, 0, 0, 71, 626, 1, 0, 0, 0, 73, 628, 1, 0, 0, 0, 75, 637, 1, 0, 0, 0, 77, 639, 1, 0, 0, 0, 79, 644, 1, 0, 0, 0, 81, 646, 1, 0, 0, 0, 83, 651, 1, 0, 0, 0, 85, 682, 1, 0, 0, 0, 87, 685, 1, 0, 0, 0, 89, 731, 1, 0, 0, 0, 91, 733, 1, 0, 0, 0, 93, 736, 1, 0, 0, 0, 95, 740, 1, 0, 0, 0, 97, 744, 1, 0, 0, 0, 99, 746, 1, 0, 0, 0, 101, 749, 1, 0, 0, 0, 103, 751, 1, 0, 0, 0, 105, 756, 1, 0, 0, 0, 107, 758, 1, 0, 0, 0, 109, 764, 1, 0, 0, 0, 111, 770, 1, 0, 0, 0, 113, 773, 1, 0, 0, 0, 115, 776, 1, 0, 0, 0, 117, 781, 1, 0, 0, 0, 119, 786, 1, 0, 0, 0, 121, 788, 1, 0, 0, 0, 123, 792, 1, 0, 0, 0, 125, 797, 1, 0, 0, 0, 127, 803, 1, 0, 0, 0, 129, 806, 1, 0, 0, 0, 131, 808, 1, 0, 0, 0, 133, 814, 1, 0, 0, 0, 135, 816, 1, 0, 0, 0, 137, 821, 1, 0, 0, 0, 139, 824, 1, 0, 0, 0, 141, 827, 1, 0, 0, 0, 143, 830, 1, 0, 0, 0, 145, 832, 1, 0, 0, 0, 147, 835, 1, 0, 0, 0, 149, 837, 1, 0, 0, 0, 151, 840, 1, 0, 0, 0, 153, 842, 1, 0, 0, 0, 155, 844, 1, 0, 0, 0, 157, 846, 1, 0, 0, 0, 159, 848, 1, 0, 0, 0, 161, 850, 1, 0, 0, 0, 163, 872, 1, 0, 0, 0, 165, 874, 1, 0, 0, 0, 167, 879, 1, 0, 0, 0, 169, 900, 1, 0, 0, 0, 171, 902, 1, 0, 0, 0, 173, 910, 1, 0, 0, 0, 175, 912, 1, 0, 0, 0, 177, 916, 1, 0, 0, 0, 179, 920, 1, 0, 0, 0, 181, 924, 1, 0, 0, 0, 183, 929, 1, 0, 0, 0, 185, 934, 1, 0, 0, 0, 187, 938, 1, 0, 0, 0, 189, 942, 1, 0, 0, 0, 191, 946, 1, 0, 0, 0, 193, 951, 1, 0, 0, 0, 195, 955, 1, 0, 0, 0, 197, 959, 1, 0, 0, 0, 199, 963, 1, 0, 0, 0, 201, 967, 1, 0, 0, 0, 203, 971, 1, 0, 0, 0, 205, 983, 1, 0, 0, 0, 207, 986, 1, 0, 0, 0, 209, 990, 1, 0, 0, 0, 211, 994, 1, 0, 0, 0, 213, 998, 1, 0, 
0, 0, 215, 1002, 1, 0, 0, 0, 217, 1006, 1, 0, 0, 0, 219, 1010, 1, 0, 0, 0, 221, 1015, 1, 0, 0, 0, 223, 1019, 1, 0, 0, 0, 225, 1027, 1, 0, 0, 0, 227, 1048, 1, 0, 0, 0, 229, 1052, 1, 0, 0, 0, 231, 1056, 1, 0, 0, 0, 233, 1060, 1, 0, 0, 0, 235, 1064, 1, 0, 0, 0, 237, 1068, 1, 0, 0, 0, 239, 1073, 1, 0, 0, 0, 241, 1077, 1, 0, 0, 0, 243, 1081, 1, 0, 0, 0, 245, 1085, 1, 0, 0, 0, 247, 1088, 1, 0, 0, 0, 249, 1092, 1, 0, 0, 0, 251, 1096, 1, 0, 0, 0, 253, 1100, 1, 0, 0, 0, 255, 1104, 1, 0, 0, 0, 257, 1109, 1, 0, 0, 0, 259, 1114, 1, 0, 0, 0, 261, 1119, 1, 0, 0, 0, 263, 1126, 1, 0, 0, 0, 265, 1135, 1, 0, 0, 0, 267, 1142, 1, 0, 0, 0, 269, 1146, 1, 0, 0, 0, 271, 1150, 1, 0, 0, 0, 273, 1154, 1, 0, 0, 0, 275, 1158, 1, 0, 0, 0, 277, 1164, 1, 0, 0, 0, 279, 1168, 1, 0, 0, 0, 281, 1172, 1, 0, 0, 0, 283, 1176, 1, 0, 0, 0, 285, 1180, 1, 0, 0, 0, 287, 1184, 1, 0, 0, 0, 289, 1188, 1, 0, 0, 0, 291, 1192, 1, 0, 0, 0, 293, 1196, 1, 0, 0, 0, 295, 1200, 1, 0, 0, 0, 297, 1205, 1, 0, 0, 0, 299, 1209, 1, 0, 0, 0, 301, 1213, 1, 0, 0, 0, 303, 1217, 1, 0, 0, 0, 305, 1221, 1, 0, 0, 0, 307, 1225, 1, 0, 0, 0, 309, 1229, 1, 0, 0, 0, 311, 1234, 1, 0, 0, 0, 313, 1239, 1, 0, 0, 0, 315, 1243, 1, 0, 0, 0, 317, 1247, 1, 0, 0, 0, 319, 1251, 1, 0, 0, 0, 321, 1256, 1, 0, 0, 0, 323, 1263, 1, 0, 0, 0, 325, 1267, 1, 0, 0, 0, 327, 1271, 1, 0, 0, 0, 329, 1275, 1, 0, 0, 0, 331, 1279, 1, 0, 0, 0, 333, 1284, 1, 0, 0, 0, 335, 1288, 1, 0, 0, 0, 337, 1292, 1, 0, 0, 0, 339, 1296, 1, 0, 0, 0, 341, 1301, 1, 0, 0, 0, 343, 1305, 1, 0, 0, 0, 345, 1309, 1, 0, 0, 0, 347, 1313, 1, 0, 0, 0, 349, 1317, 1, 0, 0, 0, 351, 1321, 1, 0, 0, 0, 353, 1327, 1, 0, 0, 0, 355, 1331, 1, 0, 0, 0, 357, 1335, 1, 0, 0, 0, 359, 1339, 1, 0, 0, 0, 361, 1343, 1, 0, 0, 0, 363, 1347, 1, 0, 0, 0, 365, 1351, 1, 0, 0, 0, 367, 1356, 1, 0, 0, 0, 369, 1362, 1, 0, 0, 0, 371, 1368, 1, 0, 0, 0, 373, 1372, 1, 0, 0, 0, 375, 1376, 1, 0, 0, 0, 377, 1380, 1, 0, 0, 0, 379, 1386, 1, 0, 0, 0, 381, 1392, 1, 0, 0, 0, 383, 1396, 1, 0, 0, 0, 385, 1400, 1, 0, 0, 0, 387, 1404, 1, 
0, 0, 0, 389, 1410, 1, 0, 0, 0, 391, 1416, 1, 0, 0, 0, 393, 1422, 1, 0, 0, 0, 395, 396, 7, 0, 0, 0, 396, 397, 7, 1, 0, 0, 397, 398, 7, 2, 0, 0, 398, 399, 7, 2, 0, 0, 399, 400, 7, 3, 0, 0, 400, 401, 7, 4, 0, 0, 401, 402, 7, 5, 0, 0, 402, 403, 1, 0, 0, 0, 403, 404, 6, 0, 0, 0, 404, 16, 1, 0, 0, 0, 405, 406, 7, 0, 0, 0, 406, 407, 7, 6, 0, 0, 407, 408, 7, 7, 0, 0, 408, 409, 7, 8, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 1, 1, 0, 411, 18, 1, 0, 0, 0, 412, 413, 7, 3, 0, 0, 413, 414, 7, 9, 0, 0, 414, 415, 7, 6, 0, 0, 415, 416, 7, 1, 0, 0, 416, 417, 7, 4, 0, 0, 417, 418, 7, 10, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 6, 2, 2, 0, 420, 20, 1, 0, 0, 0, 421, 422, 7, 3, 0, 0, 422, 423, 7, 11, 0, 0, 423, 424, 7, 12, 0, 0, 424, 425, 7, 13, 0, 0, 425, 426, 1, 0, 0, 0, 426, 427, 6, 3, 0, 0, 427, 22, 1, 0, 0, 0, 428, 429, 7, 3, 0, 0, 429, 430, 7, 14, 0, 0, 430, 431, 7, 8, 0, 0, 431, 432, 7, 13, 0, 0, 432, 433, 7, 12, 0, 0, 433, 434, 7, 1, 0, 0, 434, 435, 7, 9, 0, 0, 435, 436, 1, 0, 0, 0, 436, 437, 6, 4, 3, 0, 437, 24, 1, 0, 0, 0, 438, 439, 7, 15, 0, 0, 439, 440, 7, 6, 0, 0, 440, 441, 7, 7, 0, 0, 441, 442, 7, 16, 0, 0, 442, 443, 1, 0, 0, 0, 443, 444, 6, 5, 4, 0, 444, 26, 1, 0, 0, 0, 445, 446, 7, 17, 0, 0, 446, 447, 7, 6, 0, 0, 447, 448, 7, 7, 0, 0, 448, 449, 7, 18, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 6, 0, 0, 451, 28, 1, 0, 0, 0, 452, 453, 7, 18, 0, 0, 453, 454, 7, 3, 0, 0, 454, 455, 7, 3, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 7, 1, 0, 458, 30, 1, 0, 0, 0, 459, 460, 7, 13, 0, 0, 460, 461, 7, 1, 0, 0, 461, 462, 7, 16, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 5, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 6, 8, 0, 0, 466, 32, 1, 0, 0, 0, 467, 468, 7, 16, 0, 0, 468, 469, 7, 11, 0, 0, 469, 470, 5, 95, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 14, 0, 0, 472, 473, 7, 8, 0, 0, 473, 474, 7, 12, 0, 0, 474, 475, 7, 9, 0, 0, 475, 476, 7, 0, 0, 0, 476, 477, 1, 0, 0, 0, 477, 478, 6, 9, 5, 0, 478, 34, 1, 0, 0, 0, 479, 480, 7, 6, 0, 0, 480, 481, 7, 3, 0, 0, 481, 482, 7, 9, 0, 
0, 482, 483, 7, 12, 0, 0, 483, 484, 7, 16, 0, 0, 484, 485, 7, 3, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 6, 10, 6, 0, 487, 36, 1, 0, 0, 0, 488, 489, 7, 6, 0, 0, 489, 490, 7, 7, 0, 0, 490, 491, 7, 19, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 6, 11, 0, 0, 493, 38, 1, 0, 0, 0, 494, 495, 7, 2, 0, 0, 495, 496, 7, 10, 0, 0, 496, 497, 7, 7, 0, 0, 497, 498, 7, 19, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 6, 12, 7, 0, 500, 40, 1, 0, 0, 0, 501, 502, 7, 2, 0, 0, 502, 503, 7, 7, 0, 0, 503, 504, 7, 6, 0, 0, 504, 505, 7, 5, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 13, 0, 0, 507, 42, 1, 0, 0, 0, 508, 509, 7, 2, 0, 0, 509, 510, 7, 5, 0, 0, 510, 511, 7, 12, 0, 0, 511, 512, 7, 5, 0, 0, 512, 513, 7, 2, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 14, 0, 0, 515, 44, 1, 0, 0, 0, 516, 517, 7, 19, 0, 0, 517, 518, 7, 10, 0, 0, 518, 519, 7, 3, 0, 0, 519, 520, 7, 6, 0, 0, 520, 521, 7, 3, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 15, 0, 0, 523, 46, 1, 0, 0, 0, 524, 525, 4, 16, 0, 0, 525, 526, 7, 1, 0, 0, 526, 527, 7, 9, 0, 0, 527, 528, 7, 13, 0, 0, 528, 529, 7, 1, 0, 0, 529, 530, 7, 9, 0, 0, 530, 531, 7, 3, 0, 0, 531, 532, 7, 2, 0, 0, 532, 533, 7, 5, 0, 0, 533, 534, 7, 12, 0, 0, 534, 535, 7, 5, 0, 0, 535, 536, 7, 2, 0, 0, 536, 537, 1, 0, 0, 0, 537, 538, 6, 16, 0, 0, 538, 48, 1, 0, 0, 0, 539, 540, 4, 17, 1, 0, 540, 541, 7, 13, 0, 0, 541, 542, 7, 7, 0, 0, 542, 543, 7, 7, 0, 0, 543, 544, 7, 18, 0, 0, 544, 545, 7, 20, 0, 0, 545, 546, 7, 8, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 6, 17, 8, 0, 548, 50, 1, 0, 0, 0, 549, 550, 4, 18, 2, 0, 550, 551, 7, 16, 0, 0, 551, 552, 7, 12, 0, 0, 552, 553, 7, 5, 0, 0, 553, 554, 7, 4, 0, 0, 554, 555, 7, 10, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 6, 18, 0, 0, 557, 52, 1, 0, 0, 0, 558, 559, 4, 19, 3, 0, 559, 560, 7, 16, 0, 0, 560, 561, 7, 3, 0, 0, 561, 562, 7, 5, 0, 0, 562, 563, 7, 6, 0, 0, 563, 564, 7, 1, 0, 0, 564, 565, 7, 4, 0, 0, 565, 566, 7, 2, 0, 0, 566, 567, 1, 0, 0, 0, 567, 568, 6, 19, 9, 0, 568, 54, 1, 0, 0, 0, 569, 571, 8, 21, 0, 0, 570, 569, 1, 0, 0, 0, 571, 572, 1, 0, 
0, 0, 572, 570, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 1, 0, 0, 0, 574, 575, 6, 20, 0, 0, 575, 56, 1, 0, 0, 0, 576, 577, 5, 47, 0, 0, 577, 578, 5, 47, 0, 0, 578, 582, 1, 0, 0, 0, 579, 581, 8, 22, 0, 0, 580, 579, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 586, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 585, 587, 5, 13, 0, 0, 586, 585, 1, 0, 0, 0, 586, 587, 1, 0, 0, 0, 587, 589, 1, 0, 0, 0, 588, 590, 5, 10, 0, 0, 589, 588, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 592, 6, 21, 10, 0, 592, 58, 1, 0, 0, 0, 593, 594, 5, 47, 0, 0, 594, 595, 5, 42, 0, 0, 595, 600, 1, 0, 0, 0, 596, 599, 3, 59, 22, 0, 597, 599, 9, 0, 0, 0, 598, 596, 1, 0, 0, 0, 598, 597, 1, 0, 0, 0, 599, 602, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 601, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 604, 5, 42, 0, 0, 604, 605, 5, 47, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 6, 22, 10, 0, 607, 60, 1, 0, 0, 0, 608, 610, 7, 23, 0, 0, 609, 608, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 6, 23, 10, 0, 614, 62, 1, 0, 0, 0, 615, 616, 5, 124, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 24, 11, 0, 618, 64, 1, 0, 0, 0, 619, 620, 7, 24, 0, 0, 620, 66, 1, 0, 0, 0, 621, 622, 7, 25, 0, 0, 622, 68, 1, 0, 0, 0, 623, 624, 5, 92, 0, 0, 624, 625, 7, 26, 0, 0, 625, 70, 1, 0, 0, 0, 626, 627, 8, 27, 0, 0, 627, 72, 1, 0, 0, 0, 628, 630, 7, 3, 0, 0, 629, 631, 7, 28, 0, 0, 630, 629, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 633, 1, 0, 0, 0, 632, 634, 3, 65, 25, 0, 633, 632, 1, 0, 0, 0, 634, 635, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 635, 636, 1, 0, 0, 0, 636, 74, 1, 0, 0, 0, 637, 638, 5, 64, 0, 0, 638, 76, 1, 0, 0, 0, 639, 640, 5, 96, 0, 0, 640, 78, 1, 0, 0, 0, 641, 645, 8, 29, 0, 0, 642, 643, 5, 96, 0, 0, 643, 645, 5, 96, 0, 0, 644, 641, 1, 0, 0, 0, 644, 642, 1, 0, 0, 0, 645, 80, 1, 0, 0, 0, 646, 647, 5, 95, 0, 0, 647, 82, 1, 0, 0, 0, 648, 652, 3, 67, 26, 0, 649, 652, 3, 65, 25, 0, 650, 652, 3, 81, 33, 0, 651, 
648, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 650, 1, 0, 0, 0, 652, 84, 1, 0, 0, 0, 653, 658, 5, 34, 0, 0, 654, 657, 3, 69, 27, 0, 655, 657, 3, 71, 28, 0, 656, 654, 1, 0, 0, 0, 656, 655, 1, 0, 0, 0, 657, 660, 1, 0, 0, 0, 658, 656, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 661, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 661, 683, 5, 34, 0, 0, 662, 663, 5, 34, 0, 0, 663, 664, 5, 34, 0, 0, 664, 665, 5, 34, 0, 0, 665, 669, 1, 0, 0, 0, 666, 668, 8, 22, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 673, 5, 34, 0, 0, 673, 674, 5, 34, 0, 0, 674, 675, 5, 34, 0, 0, 675, 677, 1, 0, 0, 0, 676, 678, 5, 34, 0, 0, 677, 676, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 680, 1, 0, 0, 0, 679, 681, 5, 34, 0, 0, 680, 679, 1, 0, 0, 0, 680, 681, 1, 0, 0, 0, 681, 683, 1, 0, 0, 0, 682, 653, 1, 0, 0, 0, 682, 662, 1, 0, 0, 0, 683, 86, 1, 0, 0, 0, 684, 686, 3, 65, 25, 0, 685, 684, 1, 0, 0, 0, 686, 687, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 688, 88, 1, 0, 0, 0, 689, 691, 3, 65, 25, 0, 690, 689, 1, 0, 0, 0, 691, 692, 1, 0, 0, 0, 692, 690, 1, 0, 0, 0, 692, 693, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 698, 3, 105, 45, 0, 695, 697, 3, 65, 25, 0, 696, 695, 1, 0, 0, 0, 697, 700, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 732, 1, 0, 0, 0, 700, 698, 1, 0, 0, 0, 701, 703, 3, 105, 45, 0, 702, 704, 3, 65, 25, 0, 703, 702, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 703, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 732, 1, 0, 0, 0, 707, 709, 3, 65, 25, 0, 708, 707, 1, 0, 0, 0, 709, 710, 1, 0, 0, 0, 710, 708, 1, 0, 0, 0, 710, 711, 1, 0, 0, 0, 711, 719, 1, 0, 0, 0, 712, 716, 3, 105, 45, 0, 713, 715, 3, 65, 25, 0, 714, 713, 1, 0, 0, 0, 715, 718, 1, 0, 0, 0, 716, 714, 1, 0, 0, 0, 716, 717, 1, 0, 0, 0, 717, 720, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 719, 712, 1, 0, 0, 0, 719, 720, 1, 0, 0, 0, 720, 721, 1, 0, 0, 0, 721, 722, 3, 73, 29, 0, 722, 732, 1, 0, 0, 0, 723, 725, 3, 105, 45, 0, 724, 726, 3, 65, 25, 0, 725, 
724, 1, 0, 0, 0, 726, 727, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 727, 728, 1, 0, 0, 0, 728, 729, 1, 0, 0, 0, 729, 730, 3, 73, 29, 0, 730, 732, 1, 0, 0, 0, 731, 690, 1, 0, 0, 0, 731, 701, 1, 0, 0, 0, 731, 708, 1, 0, 0, 0, 731, 723, 1, 0, 0, 0, 732, 90, 1, 0, 0, 0, 733, 734, 7, 30, 0, 0, 734, 735, 7, 31, 0, 0, 735, 92, 1, 0, 0, 0, 736, 737, 7, 12, 0, 0, 737, 738, 7, 9, 0, 0, 738, 739, 7, 0, 0, 0, 739, 94, 1, 0, 0, 0, 740, 741, 7, 12, 0, 0, 741, 742, 7, 2, 0, 0, 742, 743, 7, 4, 0, 0, 743, 96, 1, 0, 0, 0, 744, 745, 5, 61, 0, 0, 745, 98, 1, 0, 0, 0, 746, 747, 5, 58, 0, 0, 747, 748, 5, 58, 0, 0, 748, 100, 1, 0, 0, 0, 749, 750, 5, 44, 0, 0, 750, 102, 1, 0, 0, 0, 751, 752, 7, 0, 0, 0, 752, 753, 7, 3, 0, 0, 753, 754, 7, 2, 0, 0, 754, 755, 7, 4, 0, 0, 755, 104, 1, 0, 0, 0, 756, 757, 5, 46, 0, 0, 757, 106, 1, 0, 0, 0, 758, 759, 7, 15, 0, 0, 759, 760, 7, 12, 0, 0, 760, 761, 7, 13, 0, 0, 761, 762, 7, 2, 0, 0, 762, 763, 7, 3, 0, 0, 763, 108, 1, 0, 0, 0, 764, 765, 7, 15, 0, 0, 765, 766, 7, 1, 0, 0, 766, 767, 7, 6, 0, 0, 767, 768, 7, 2, 0, 0, 768, 769, 7, 5, 0, 0, 769, 110, 1, 0, 0, 0, 770, 771, 7, 1, 0, 0, 771, 772, 7, 9, 0, 0, 772, 112, 1, 0, 0, 0, 773, 774, 7, 1, 0, 0, 774, 775, 7, 2, 0, 0, 775, 114, 1, 0, 0, 0, 776, 777, 7, 13, 0, 0, 777, 778, 7, 12, 0, 0, 778, 779, 7, 2, 0, 0, 779, 780, 7, 5, 0, 0, 780, 116, 1, 0, 0, 0, 781, 782, 7, 13, 0, 0, 782, 783, 7, 1, 0, 0, 783, 784, 7, 18, 0, 0, 784, 785, 7, 3, 0, 0, 785, 118, 1, 0, 0, 0, 786, 787, 5, 40, 0, 0, 787, 120, 1, 0, 0, 0, 788, 789, 7, 9, 0, 0, 789, 790, 7, 7, 0, 0, 790, 791, 7, 5, 0, 0, 791, 122, 1, 0, 0, 0, 792, 793, 7, 9, 0, 0, 793, 794, 7, 20, 0, 0, 794, 795, 7, 13, 0, 0, 795, 796, 7, 13, 0, 0, 796, 124, 1, 0, 0, 0, 797, 798, 7, 9, 0, 0, 798, 799, 7, 20, 0, 0, 799, 800, 7, 13, 0, 0, 800, 801, 7, 13, 0, 0, 801, 802, 7, 2, 0, 0, 802, 126, 1, 0, 0, 0, 803, 804, 7, 7, 0, 0, 804, 805, 7, 6, 0, 0, 805, 128, 1, 0, 0, 0, 806, 807, 5, 63, 0, 0, 807, 130, 1, 0, 0, 0, 808, 809, 7, 6, 0, 0, 809, 810, 7, 13, 0, 0, 810, 811, 7, 1, 0, 0, 
811, 812, 7, 18, 0, 0, 812, 813, 7, 3, 0, 0, 813, 132, 1, 0, 0, 0, 814, 815, 5, 41, 0, 0, 815, 134, 1, 0, 0, 0, 816, 817, 7, 5, 0, 0, 817, 818, 7, 6, 0, 0, 818, 819, 7, 20, 0, 0, 819, 820, 7, 3, 0, 0, 820, 136, 1, 0, 0, 0, 821, 822, 5, 61, 0, 0, 822, 823, 5, 61, 0, 0, 823, 138, 1, 0, 0, 0, 824, 825, 5, 61, 0, 0, 825, 826, 5, 126, 0, 0, 826, 140, 1, 0, 0, 0, 827, 828, 5, 33, 0, 0, 828, 829, 5, 61, 0, 0, 829, 142, 1, 0, 0, 0, 830, 831, 5, 60, 0, 0, 831, 144, 1, 0, 0, 0, 832, 833, 5, 60, 0, 0, 833, 834, 5, 61, 0, 0, 834, 146, 1, 0, 0, 0, 835, 836, 5, 62, 0, 0, 836, 148, 1, 0, 0, 0, 837, 838, 5, 62, 0, 0, 838, 839, 5, 61, 0, 0, 839, 150, 1, 0, 0, 0, 840, 841, 5, 43, 0, 0, 841, 152, 1, 0, 0, 0, 842, 843, 5, 45, 0, 0, 843, 154, 1, 0, 0, 0, 844, 845, 5, 42, 0, 0, 845, 156, 1, 0, 0, 0, 846, 847, 5, 47, 0, 0, 847, 158, 1, 0, 0, 0, 848, 849, 5, 37, 0, 0, 849, 160, 1, 0, 0, 0, 850, 851, 4, 73, 4, 0, 851, 852, 3, 51, 18, 0, 852, 853, 1, 0, 0, 0, 853, 854, 6, 73, 12, 0, 854, 162, 1, 0, 0, 0, 855, 858, 3, 129, 57, 0, 856, 859, 3, 67, 26, 0, 857, 859, 3, 81, 33, 0, 858, 856, 1, 0, 0, 0, 858, 857, 1, 0, 0, 0, 859, 863, 1, 0, 0, 0, 860, 862, 3, 83, 34, 0, 861, 860, 1, 0, 0, 0, 862, 865, 1, 0, 0, 0, 863, 861, 1, 0, 0, 0, 863, 864, 1, 0, 0, 0, 864, 873, 1, 0, 0, 0, 865, 863, 1, 0, 0, 0, 866, 868, 3, 129, 57, 0, 867, 869, 3, 65, 25, 0, 868, 867, 1, 0, 0, 0, 869, 870, 1, 0, 0, 0, 870, 868, 1, 0, 0, 0, 870, 871, 1, 0, 0, 0, 871, 873, 1, 0, 0, 0, 872, 855, 1, 0, 0, 0, 872, 866, 1, 0, 0, 0, 873, 164, 1, 0, 0, 0, 874, 875, 5, 91, 0, 0, 875, 876, 1, 0, 0, 0, 876, 877, 6, 75, 0, 0, 877, 878, 6, 75, 0, 0, 878, 166, 1, 0, 0, 0, 879, 880, 5, 93, 0, 0, 880, 881, 1, 0, 0, 0, 881, 882, 6, 76, 11, 0, 882, 883, 6, 76, 11, 0, 883, 168, 1, 0, 0, 0, 884, 888, 3, 67, 26, 0, 885, 887, 3, 83, 34, 0, 886, 885, 1, 0, 0, 0, 887, 890, 1, 0, 0, 0, 888, 886, 1, 0, 0, 0, 888, 889, 1, 0, 0, 0, 889, 901, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 891, 894, 3, 81, 33, 0, 892, 894, 3, 75, 30, 0, 893, 891, 1, 0, 0, 0, 893, 
892, 1, 0, 0, 0, 894, 896, 1, 0, 0, 0, 895, 897, 3, 83, 34, 0, 896, 895, 1, 0, 0, 0, 897, 898, 1, 0, 0, 0, 898, 896, 1, 0, 0, 0, 898, 899, 1, 0, 0, 0, 899, 901, 1, 0, 0, 0, 900, 884, 1, 0, 0, 0, 900, 893, 1, 0, 0, 0, 901, 170, 1, 0, 0, 0, 902, 904, 3, 77, 31, 0, 903, 905, 3, 79, 32, 0, 904, 903, 1, 0, 0, 0, 905, 906, 1, 0, 0, 0, 906, 904, 1, 0, 0, 0, 906, 907, 1, 0, 0, 0, 907, 908, 1, 0, 0, 0, 908, 909, 3, 77, 31, 0, 909, 172, 1, 0, 0, 0, 910, 911, 3, 171, 78, 0, 911, 174, 1, 0, 0, 0, 912, 913, 3, 57, 21, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 80, 10, 0, 915, 176, 1, 0, 0, 0, 916, 917, 3, 59, 22, 0, 917, 918, 1, 0, 0, 0, 918, 919, 6, 81, 10, 0, 919, 178, 1, 0, 0, 0, 920, 921, 3, 61, 23, 0, 921, 922, 1, 0, 0, 0, 922, 923, 6, 82, 10, 0, 923, 180, 1, 0, 0, 0, 924, 925, 3, 165, 75, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 83, 13, 0, 927, 928, 6, 83, 14, 0, 928, 182, 1, 0, 0, 0, 929, 930, 3, 63, 24, 0, 930, 931, 1, 0, 0, 0, 931, 932, 6, 84, 15, 0, 932, 933, 6, 84, 11, 0, 933, 184, 1, 0, 0, 0, 934, 935, 3, 61, 23, 0, 935, 936, 1, 0, 0, 0, 936, 937, 6, 85, 10, 0, 937, 186, 1, 0, 0, 0, 938, 939, 3, 57, 21, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 86, 10, 0, 941, 188, 1, 0, 0, 0, 942, 943, 3, 59, 22, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 87, 10, 0, 945, 190, 1, 0, 0, 0, 946, 947, 3, 63, 24, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 88, 15, 0, 949, 950, 6, 88, 11, 0, 950, 192, 1, 0, 0, 0, 951, 952, 3, 165, 75, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 89, 13, 0, 954, 194, 1, 0, 0, 0, 955, 956, 3, 167, 76, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 90, 16, 0, 958, 196, 1, 0, 0, 0, 959, 960, 3, 321, 153, 0, 960, 961, 1, 0, 0, 0, 961, 962, 6, 91, 17, 0, 962, 198, 1, 0, 0, 0, 963, 964, 3, 101, 43, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 92, 18, 0, 966, 200, 1, 0, 0, 0, 967, 968, 3, 97, 41, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 93, 19, 0, 970, 202, 1, 0, 0, 0, 971, 972, 7, 16, 0, 0, 972, 973, 7, 3, 0, 0, 973, 974, 7, 5, 0, 0, 974, 975, 7, 12, 0, 0, 975, 976, 7, 0, 0, 0, 976, 977, 7, 12, 0, 0, 977, 
978, 7, 5, 0, 0, 978, 979, 7, 12, 0, 0, 979, 204, 1, 0, 0, 0, 980, 984, 8, 32, 0, 0, 981, 982, 5, 47, 0, 0, 982, 984, 8, 33, 0, 0, 983, 980, 1, 0, 0, 0, 983, 981, 1, 0, 0, 0, 984, 206, 1, 0, 0, 0, 985, 987, 3, 205, 95, 0, 986, 985, 1, 0, 0, 0, 987, 988, 1, 0, 0, 0, 988, 986, 1, 0, 0, 0, 988, 989, 1, 0, 0, 0, 989, 208, 1, 0, 0, 0, 990, 991, 3, 207, 96, 0, 991, 992, 1, 0, 0, 0, 992, 993, 6, 97, 20, 0, 993, 210, 1, 0, 0, 0, 994, 995, 3, 85, 35, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 98, 21, 0, 997, 212, 1, 0, 0, 0, 998, 999, 3, 57, 21, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 99, 10, 0, 1001, 214, 1, 0, 0, 0, 1002, 1003, 3, 59, 22, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 100, 10, 0, 1005, 216, 1, 0, 0, 0, 1006, 1007, 3, 61, 23, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 101, 10, 0, 1009, 218, 1, 0, 0, 0, 1010, 1011, 3, 63, 24, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 102, 15, 0, 1013, 1014, 6, 102, 11, 0, 1014, 220, 1, 0, 0, 0, 1015, 1016, 3, 105, 45, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 103, 22, 0, 1018, 222, 1, 0, 0, 0, 1019, 1020, 3, 101, 43, 0, 1020, 1021, 1, 0, 0, 0, 1021, 1022, 6, 104, 18, 0, 1022, 224, 1, 0, 0, 0, 1023, 1028, 3, 67, 26, 0, 1024, 1028, 3, 65, 25, 0, 1025, 1028, 3, 81, 33, 0, 1026, 1028, 3, 155, 70, 0, 1027, 1023, 1, 0, 0, 0, 1027, 1024, 1, 0, 0, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1026, 1, 0, 0, 0, 1028, 226, 1, 0, 0, 0, 1029, 1032, 3, 67, 26, 0, 1030, 1032, 3, 155, 70, 0, 1031, 1029, 1, 0, 0, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1036, 1, 0, 0, 0, 1033, 1035, 3, 225, 105, 0, 1034, 1033, 1, 0, 0, 0, 1035, 1038, 1, 0, 0, 0, 1036, 1034, 1, 0, 0, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1049, 1, 0, 0, 0, 1038, 1036, 1, 0, 0, 0, 1039, 1042, 3, 81, 33, 0, 1040, 1042, 3, 75, 30, 0, 1041, 1039, 1, 0, 0, 0, 1041, 1040, 1, 0, 0, 0, 1042, 1044, 1, 0, 0, 0, 1043, 1045, 3, 225, 105, 0, 1044, 1043, 1, 0, 0, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1044, 1, 0, 0, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1049, 1, 0, 0, 0, 1048, 1031, 1, 0, 0, 0, 1048, 1041, 1, 0, 0, 0, 1049, 228, 1, 0, 
0, 0, 1050, 1053, 3, 227, 106, 0, 1051, 1053, 3, 171, 78, 0, 1052, 1050, 1, 0, 0, 0, 1052, 1051, 1, 0, 0, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1052, 1, 0, 0, 0, 1054, 1055, 1, 0, 0, 0, 1055, 230, 1, 0, 0, 0, 1056, 1057, 3, 57, 21, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 108, 10, 0, 1059, 232, 1, 0, 0, 0, 1060, 1061, 3, 59, 22, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 109, 10, 0, 1063, 234, 1, 0, 0, 0, 1064, 1065, 3, 61, 23, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 110, 10, 0, 1067, 236, 1, 0, 0, 0, 1068, 1069, 3, 63, 24, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 111, 15, 0, 1071, 1072, 6, 111, 11, 0, 1072, 238, 1, 0, 0, 0, 1073, 1074, 3, 97, 41, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 112, 19, 0, 1076, 240, 1, 0, 0, 0, 1077, 1078, 3, 101, 43, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 113, 18, 0, 1080, 242, 1, 0, 0, 0, 1081, 1082, 3, 105, 45, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 114, 22, 0, 1084, 244, 1, 0, 0, 0, 1085, 1086, 7, 12, 0, 0, 1086, 1087, 7, 2, 0, 0, 1087, 246, 1, 0, 0, 0, 1088, 1089, 3, 229, 107, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 116, 23, 0, 1091, 248, 1, 0, 0, 0, 1092, 1093, 3, 57, 21, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 117, 10, 0, 1095, 250, 1, 0, 0, 0, 1096, 1097, 3, 59, 22, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 118, 10, 0, 1099, 252, 1, 0, 0, 0, 1100, 1101, 3, 61, 23, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 119, 10, 0, 1103, 254, 1, 0, 0, 0, 1104, 1105, 3, 63, 24, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 120, 15, 0, 1107, 1108, 6, 120, 11, 0, 1108, 256, 1, 0, 0, 0, 1109, 1110, 3, 165, 75, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 121, 13, 0, 1112, 1113, 6, 121, 24, 0, 1113, 258, 1, 0, 0, 0, 1114, 1115, 7, 7, 0, 0, 1115, 1116, 7, 9, 0, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 122, 25, 0, 1118, 260, 1, 0, 0, 0, 1119, 1120, 7, 19, 0, 0, 1120, 1121, 7, 1, 0, 0, 1121, 1122, 7, 5, 0, 0, 1122, 1123, 7, 10, 0, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 123, 25, 0, 1125, 262, 1, 0, 0, 0, 1126, 1127, 8, 34, 0, 0, 1127, 264, 1, 0, 
0, 0, 1128, 1130, 3, 263, 124, 0, 1129, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 1, 0, 0, 0, 1133, 1134, 3, 321, 153, 0, 1134, 1136, 1, 0, 0, 0, 1135, 1129, 1, 0, 0, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 263, 124, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 266, 1, 0, 0, 0, 1142, 1143, 3, 265, 125, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 126, 26, 0, 1145, 268, 1, 0, 0, 0, 1146, 1147, 3, 57, 21, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 127, 10, 0, 1149, 270, 1, 0, 0, 0, 1150, 1151, 3, 59, 22, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 128, 10, 0, 1153, 272, 1, 0, 0, 0, 1154, 1155, 3, 61, 23, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 129, 10, 0, 1157, 274, 1, 0, 0, 0, 1158, 1159, 3, 63, 24, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 130, 15, 0, 1161, 1162, 6, 130, 11, 0, 1162, 1163, 6, 130, 11, 0, 1163, 276, 1, 0, 0, 0, 1164, 1165, 3, 97, 41, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 131, 19, 0, 1167, 278, 1, 0, 0, 0, 1168, 1169, 3, 101, 43, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 132, 18, 0, 1171, 280, 1, 0, 0, 0, 1172, 1173, 3, 105, 45, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 133, 22, 0, 1175, 282, 1, 0, 0, 0, 1176, 1177, 3, 261, 123, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 134, 27, 0, 1179, 284, 1, 0, 0, 0, 1180, 1181, 3, 229, 107, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 135, 23, 0, 1183, 286, 1, 0, 0, 0, 1184, 1185, 3, 173, 79, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 136, 28, 0, 1187, 288, 1, 0, 0, 0, 1188, 1189, 3, 57, 21, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 137, 10, 0, 1191, 290, 1, 0, 0, 0, 1192, 1193, 3, 59, 22, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 138, 10, 0, 1195, 292, 1, 0, 0, 0, 1196, 1197, 3, 61, 23, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 139, 10, 0, 1199, 294, 1, 0, 0, 0, 1200, 1201, 3, 63, 24, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 140, 15, 0, 1203, 1204, 6, 140, 11, 0, 1204, 
296, 1, 0, 0, 0, 1205, 1206, 3, 105, 45, 0, 1206, 1207, 1, 0, 0, 0, 1207, 1208, 6, 141, 22, 0, 1208, 298, 1, 0, 0, 0, 1209, 1210, 3, 173, 79, 0, 1210, 1211, 1, 0, 0, 0, 1211, 1212, 6, 142, 28, 0, 1212, 300, 1, 0, 0, 0, 1213, 1214, 3, 169, 77, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 143, 29, 0, 1216, 302, 1, 0, 0, 0, 1217, 1218, 3, 57, 21, 0, 1218, 1219, 1, 0, 0, 0, 1219, 1220, 6, 144, 10, 0, 1220, 304, 1, 0, 0, 0, 1221, 1222, 3, 59, 22, 0, 1222, 1223, 1, 0, 0, 0, 1223, 1224, 6, 145, 10, 0, 1224, 306, 1, 0, 0, 0, 1225, 1226, 3, 61, 23, 0, 1226, 1227, 1, 0, 0, 0, 1227, 1228, 6, 146, 10, 0, 1228, 308, 1, 0, 0, 0, 1229, 1230, 3, 63, 24, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 147, 15, 0, 1232, 1233, 6, 147, 11, 0, 1233, 310, 1, 0, 0, 0, 1234, 1235, 7, 1, 0, 0, 1235, 1236, 7, 9, 0, 0, 1236, 1237, 7, 15, 0, 0, 1237, 1238, 7, 7, 0, 0, 1238, 312, 1, 0, 0, 0, 1239, 1240, 3, 57, 21, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 6, 149, 10, 0, 1242, 314, 1, 0, 0, 0, 1243, 1244, 3, 59, 22, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 150, 10, 0, 1246, 316, 1, 0, 0, 0, 1247, 1248, 3, 61, 23, 0, 1248, 1249, 1, 0, 0, 0, 1249, 1250, 6, 151, 10, 0, 1250, 318, 1, 0, 0, 0, 1251, 1252, 3, 167, 76, 0, 1252, 1253, 1, 0, 0, 0, 1253, 1254, 6, 152, 16, 0, 1254, 1255, 6, 152, 11, 0, 1255, 320, 1, 0, 0, 0, 1256, 1257, 5, 58, 0, 0, 1257, 322, 1, 0, 0, 0, 1258, 1264, 3, 75, 30, 0, 1259, 1264, 3, 65, 25, 0, 1260, 1264, 3, 105, 45, 0, 1261, 1264, 3, 67, 26, 0, 1262, 1264, 3, 81, 33, 0, 1263, 1258, 1, 0, 0, 0, 1263, 1259, 1, 0, 0, 0, 1263, 1260, 1, 0, 0, 0, 1263, 1261, 1, 0, 0, 0, 1263, 1262, 1, 0, 0, 0, 1264, 1265, 1, 0, 0, 0, 1265, 1263, 1, 0, 0, 0, 1265, 1266, 1, 0, 0, 0, 1266, 324, 1, 0, 0, 0, 1267, 1268, 3, 57, 21, 0, 1268, 1269, 1, 0, 0, 0, 1269, 1270, 6, 155, 10, 0, 1270, 326, 1, 0, 0, 0, 1271, 1272, 3, 59, 22, 0, 1272, 1273, 1, 0, 0, 0, 1273, 1274, 6, 156, 10, 0, 1274, 328, 1, 0, 0, 0, 1275, 1276, 3, 61, 23, 0, 1276, 1277, 1, 0, 0, 0, 1277, 1278, 6, 157, 10, 0, 1278, 330, 1, 0, 0, 0, 1279, 
1280, 3, 63, 24, 0, 1280, 1281, 1, 0, 0, 0, 1281, 1282, 6, 158, 15, 0, 1282, 1283, 6, 158, 11, 0, 1283, 332, 1, 0, 0, 0, 1284, 1285, 3, 321, 153, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 159, 17, 0, 1287, 334, 1, 0, 0, 0, 1288, 1289, 3, 101, 43, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 160, 18, 0, 1291, 336, 1, 0, 0, 0, 1292, 1293, 3, 105, 45, 0, 1293, 1294, 1, 0, 0, 0, 1294, 1295, 6, 161, 22, 0, 1295, 338, 1, 0, 0, 0, 1296, 1297, 3, 259, 122, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 162, 30, 0, 1299, 1300, 6, 162, 31, 0, 1300, 340, 1, 0, 0, 0, 1301, 1302, 3, 207, 96, 0, 1302, 1303, 1, 0, 0, 0, 1303, 1304, 6, 163, 20, 0, 1304, 342, 1, 0, 0, 0, 1305, 1306, 3, 85, 35, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 164, 21, 0, 1308, 344, 1, 0, 0, 0, 1309, 1310, 3, 57, 21, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 165, 10, 0, 1312, 346, 1, 0, 0, 0, 1313, 1314, 3, 59, 22, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1316, 6, 166, 10, 0, 1316, 348, 1, 0, 0, 0, 1317, 1318, 3, 61, 23, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 167, 10, 0, 1320, 350, 1, 0, 0, 0, 1321, 1322, 3, 63, 24, 0, 1322, 1323, 1, 0, 0, 0, 1323, 1324, 6, 168, 15, 0, 1324, 1325, 6, 168, 11, 0, 1325, 1326, 6, 168, 11, 0, 1326, 352, 1, 0, 0, 0, 1327, 1328, 3, 101, 43, 0, 1328, 1329, 1, 0, 0, 0, 1329, 1330, 6, 169, 18, 0, 1330, 354, 1, 0, 0, 0, 1331, 1332, 3, 105, 45, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 170, 22, 0, 1334, 356, 1, 0, 0, 0, 1335, 1336, 3, 229, 107, 0, 1336, 1337, 1, 0, 0, 0, 1337, 1338, 6, 171, 23, 0, 1338, 358, 1, 0, 0, 0, 1339, 1340, 3, 57, 21, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 172, 10, 0, 1342, 360, 1, 0, 0, 0, 1343, 1344, 3, 59, 22, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 173, 10, 0, 1346, 362, 1, 0, 0, 0, 1347, 1348, 3, 61, 23, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 174, 10, 0, 1350, 364, 1, 0, 0, 0, 1351, 1352, 3, 63, 24, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 175, 15, 0, 1354, 1355, 6, 175, 11, 0, 1355, 366, 1, 0, 0, 0, 1356, 1357, 3, 207, 96, 0, 1357, 1358, 1, 0, 0, 0, 1358, 
1359, 6, 176, 20, 0, 1359, 1360, 6, 176, 11, 0, 1360, 1361, 6, 176, 32, 0, 1361, 368, 1, 0, 0, 0, 1362, 1363, 3, 85, 35, 0, 1363, 1364, 1, 0, 0, 0, 1364, 1365, 6, 177, 21, 0, 1365, 1366, 6, 177, 11, 0, 1366, 1367, 6, 177, 32, 0, 1367, 370, 1, 0, 0, 0, 1368, 1369, 3, 57, 21, 0, 1369, 1370, 1, 0, 0, 0, 1370, 1371, 6, 178, 10, 0, 1371, 372, 1, 0, 0, 0, 1372, 1373, 3, 59, 22, 0, 1373, 1374, 1, 0, 0, 0, 1374, 1375, 6, 179, 10, 0, 1375, 374, 1, 0, 0, 0, 1376, 1377, 3, 61, 23, 0, 1377, 1378, 1, 0, 0, 0, 1378, 1379, 6, 180, 10, 0, 1379, 376, 1, 0, 0, 0, 1380, 1381, 3, 321, 153, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 181, 17, 0, 1383, 1384, 6, 181, 11, 0, 1384, 1385, 6, 181, 9, 0, 1385, 378, 1, 0, 0, 0, 1386, 1387, 3, 101, 43, 0, 1387, 1388, 1, 0, 0, 0, 1388, 1389, 6, 182, 18, 0, 1389, 1390, 6, 182, 11, 0, 1390, 1391, 6, 182, 9, 0, 1391, 380, 1, 0, 0, 0, 1392, 1393, 3, 57, 21, 0, 1393, 1394, 1, 0, 0, 0, 1394, 1395, 6, 183, 10, 0, 1395, 382, 1, 0, 0, 0, 1396, 1397, 3, 59, 22, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1399, 6, 184, 10, 0, 1399, 384, 1, 0, 0, 0, 1400, 1401, 3, 61, 23, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 185, 10, 0, 1403, 386, 1, 0, 0, 0, 1404, 1405, 3, 173, 79, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 186, 11, 0, 1407, 1408, 6, 186, 0, 0, 1408, 1409, 6, 186, 28, 0, 1409, 388, 1, 0, 0, 0, 1410, 1411, 3, 169, 77, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 187, 11, 0, 1413, 1414, 6, 187, 0, 0, 1414, 1415, 6, 187, 29, 0, 1415, 390, 1, 0, 0, 0, 1416, 1417, 3, 91, 38, 0, 1417, 1418, 1, 0, 0, 0, 1418, 1419, 6, 188, 11, 0, 1419, 1420, 6, 188, 0, 0, 1420, 1421, 6, 188, 33, 0, 1421, 392, 1, 0, 0, 0, 1422, 1423, 3, 63, 24, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 189, 15, 0, 1425, 1426, 6, 189, 11, 0, 1426, 394, 1, 0, 0, 0, 65, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 572, 582, 586, 589, 598, 600, 611, 630, 635, 644, 651, 656, 658, 669, 677, 680, 682, 687, 692, 698, 705, 710, 716, 719, 727, 731, 858, 863, 870, 872, 888, 893, 898, 900, 906, 983, 988, 1027, 1031, 
1036, 1041, 1046, 1048, 1052, 1054, 1131, 1135, 1140, 1263, 1265, 34, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 19, 0, 7, 65, 0, 5, 0, 0, 7, 25, 0, 7, 66, 0, 7, 104, 0, 7, 34, 0, 7, 32, 0, 7, 76, 0, 7, 26, 0, 7, 36, 0, 7, 80, 0, 5, 10, 0, 5, 7, 0, 7, 90, 0, 7, 89, 0, 7, 68, 0, 7, 67, 0, 7, 88, 0, 5, 12, 0, 5, 14, 0, 7, 29, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index a746a0d49004..d3ad1d00d749 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -26,36 +26,34 @@ public class EsqlBaseLexer extends LexerConfig { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, META=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, DEV_INLINESTATS=18, DEV_LOOKUP=19, DEV_MATCH=20, DEV_METRICS=21, - UNKNOWN_CMD=22, LINE_COMMENT=23, MULTILINE_COMMENT=24, WS=25, PIPE=26, - QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, - ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, FIRST=39, - IN=40, IS=41, LAST=42, LIKE=43, LP=44, NOT=45, NULL=46, NULLS=47, OR=48, - PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, LTE=57, - GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, NAMED_OR_POSITIONAL_PARAM=65, - OPENING_BRACKET=66, CLOSING_BRACKET=67, UNQUOTED_IDENTIFIER=68, QUOTED_IDENTIFIER=69, - EXPR_LINE_COMMENT=70, EXPR_MULTILINE_COMMENT=71, EXPR_WS=72, EXPLAIN_WS=73, - EXPLAIN_LINE_COMMENT=74, EXPLAIN_MULTILINE_COMMENT=75, METADATA=76, UNQUOTED_SOURCE=77, - FROM_LINE_COMMENT=78, FROM_MULTILINE_COMMENT=79, FROM_WS=80, 
ID_PATTERN=81, - PROJECT_LINE_COMMENT=82, PROJECT_MULTILINE_COMMENT=83, PROJECT_WS=84, - AS=85, RENAME_LINE_COMMENT=86, RENAME_MULTILINE_COMMENT=87, RENAME_WS=88, - ON=89, WITH=90, ENRICH_POLICY_NAME=91, ENRICH_LINE_COMMENT=92, ENRICH_MULTILINE_COMMENT=93, - ENRICH_WS=94, ENRICH_FIELD_LINE_COMMENT=95, ENRICH_FIELD_MULTILINE_COMMENT=96, - ENRICH_FIELD_WS=97, MVEXPAND_LINE_COMMENT=98, MVEXPAND_MULTILINE_COMMENT=99, - MVEXPAND_WS=100, INFO=101, SHOW_LINE_COMMENT=102, SHOW_MULTILINE_COMMENT=103, - SHOW_WS=104, FUNCTIONS=105, META_LINE_COMMENT=106, META_MULTILINE_COMMENT=107, - META_WS=108, COLON=109, SETTING=110, SETTING_LINE_COMMENT=111, SETTTING_MULTILINE_COMMENT=112, - SETTING_WS=113, LOOKUP_LINE_COMMENT=114, LOOKUP_MULTILINE_COMMENT=115, - LOOKUP_WS=116, LOOKUP_FIELD_LINE_COMMENT=117, LOOKUP_FIELD_MULTILINE_COMMENT=118, - LOOKUP_FIELD_WS=119, METRICS_LINE_COMMENT=120, METRICS_MULTILINE_COMMENT=121, - METRICS_WS=122, CLOSING_METRICS_LINE_COMMENT=123, CLOSING_METRICS_MULTILINE_COMMENT=124, - CLOSING_METRICS_WS=125; + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_MATCH=19, DEV_METRICS=20, + UNKNOWN_CMD=21, LINE_COMMENT=22, MULTILINE_COMMENT=23, WS=24, PIPE=25, + QUOTED_STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, + ASC=31, ASSIGN=32, CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, + IN=39, IS=40, LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, + PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, + GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + 
PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + CLOSING_METRICS_WS=120; public static final int EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, - ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, - SETTING_MODE=11, LOOKUP_MODE=12, LOOKUP_FIELD_MODE=13, METRICS_MODE=14, - CLOSING_METRICS_MODE=15; + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, + LOOKUP_MODE=11, LOOKUP_FIELD_MODE=12, METRICS_MODE=13, CLOSING_METRICS_MODE=14; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -63,18 +61,17 @@ public class EsqlBaseLexer extends LexerConfig { public static String[] modeNames = { "DEFAULT_MODE", "EXPRESSION_MODE", "EXPLAIN_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "META_MODE", "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", - "CLOSING_METRICS_MODE" + "SETTING_MODE", "LOOKUP_MODE", "LOOKUP_FIELD_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { 
"DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "KEEP", - "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", - "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", - "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", - "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", + "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", + "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", "UNKNOWN_CMD", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", + "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", + "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", @@ -98,8 +95,7 @@ public class EsqlBaseLexer extends LexerConfig { "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", - "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_PIPE", "LOOKUP_COLON", "LOOKUP_COMMA", "LOOKUP_DOT", "LOOKUP_ON", "LOOKUP_UNQUOTED_SOURCE", "LOOKUP_QUOTED_SOURCE", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", @@ -117,25 +113,25 @@ public class EsqlBaseLexer extends LexerConfig { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", 
"'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'meta'", "'mv_expand'", "'rename'", "'row'", - "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, - null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", - "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", - "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", - "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", - "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", - null, null, null, null, null, null, null, null, "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", - "'with'", null, null, null, null, null, null, null, null, null, null, - "'info'", null, null, null, "'functions'", null, null, null, "':'" + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", + "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", + "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", - "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", + "KEEP", 
"LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", @@ -151,8 +147,7 @@ public class EsqlBaseLexer extends LexerConfig { "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", - "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", @@ -222,15 +217,15 @@ public class EsqlBaseLexer extends LexerConfig { @Override public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 17: + case 16: return DEV_INLINESTATS_sempred((RuleContext)_localctx, predIndex); - case 18: + case 17: return DEV_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 19: + case 18: return DEV_MATCH_sempred((RuleContext)_localctx, predIndex); - case 20: + case 19: return DEV_METRICS_sempred((RuleContext)_localctx, predIndex); - case 74: + case 73: return DEV_MATCH_OP_sempred((RuleContext)_localctx, predIndex); } return true; @@ -272,963 +267,931 @@ public class EsqlBaseLexer extends LexerConfig { } public static final String _serializedATN = - "\u0004\u0000}\u05c2\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + 
"\u0004\u0000x\u0593\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ - "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ - "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ - "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ - "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ - 
"U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ - "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ - "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ - "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ - "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ - "n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ - "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ - "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ - "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ - "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ - "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ - "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ - "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ - "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ - "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ - "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ - "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ - "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ - "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1"+ - "\u0002\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4"+ - "\u0002\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7"+ - "\u0002\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa"+ - "\u0002\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad"+ - "\u0002\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007\u00b0"+ - 
"\u0002\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007\u00b3"+ - "\u0002\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6"+ - "\u0002\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9"+ - "\u0002\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc"+ - "\u0002\u00bd\u0007\u00bd\u0002\u00be\u0007\u00be\u0002\u00bf\u0007\u00bf"+ - "\u0002\u00c0\u0007\u00c0\u0002\u00c1\u0007\u00c1\u0002\u00c2\u0007\u00c2"+ - "\u0002\u00c3\u0007\u00c3\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+ + "\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005"+ + "\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002"+ + "\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002"+ + "\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010"+ + "\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013"+ + "\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016"+ + "\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019"+ + "\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c"+ + "\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f"+ + "\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007"+ + "#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007"+ + "(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007"+ + "-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u00022\u0007"+ + "2\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u00027\u0007"+ + "7\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007"+ + "<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007"+ + 
"A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007"+ + "F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007"+ + "K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007"+ + "P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007"+ + "U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007"+ + "Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007"+ + "_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007"+ + "d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007"+ + "i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007"+ + "n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007"+ + "s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007"+ + "x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007"+ + "}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002"+ + "\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002"+ + "\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002"+ + "\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002"+ + "\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002"+ + "\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+ + "\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+ + "\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+ + "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+ + "\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002"+ + "\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002"+ + "\u009f\u0007\u009f\u0002\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002"+ + "\u00a2\u0007\u00a2\u0002\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002"+ + 
"\u00a5\u0007\u00a5\u0002\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002"+ + "\u00a8\u0007\u00a8\u0002\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002"+ + "\u00ab\u0007\u00ab\u0002\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0002"+ + "\u00ae\u0007\u00ae\u0002\u00af\u0007\u00af\u0002\u00b0\u0007\u00b0\u0002"+ + "\u00b1\u0007\u00b1\u0002\u00b2\u0007\u00b2\u0002\u00b3\u0007\u00b3\u0002"+ + "\u00b4\u0007\u00b4\u0002\u00b5\u0007\u00b5\u0002\u00b6\u0007\u00b6\u0002"+ + "\u00b7\u0007\u00b7\u0002\u00b8\u0007\u00b8\u0002\u00b9\u0007\u00b9\u0002"+ + "\u00ba\u0007\u00ba\u0002\u00bb\u0007\u00bb\u0002\u00bc\u0007\u00bc\u0002"+ + "\u00bd\u0007\u00bd\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ - 
"\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u024f\b\u0015\u000b\u0015\f"+ - "\u0015\u0250\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0005\u0016\u0259\b\u0016\n\u0016\f\u0016\u025c\t\u0016\u0001"+ - "\u0016\u0003\u0016\u025f\b\u0016\u0001\u0016\u0003\u0016\u0262\b\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0005\u0017\u026b\b\u0017\n\u0017\f\u0017\u026e\t\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ - "\u0018\u0276\b\u0018\u000b\u0018\f\u0018\u0277\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d"+ - "\u0001\u001d\u0001\u001e\u0001\u001e\u0003\u001e\u028b\b\u001e\u0001\u001e"+ - "\u0004\u001e\u028e\b\u001e\u000b\u001e\f\u001e\u028f\u0001\u001f\u0001"+ - "\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u0299\b!\u0001\"\u0001"+ - "\"\u0001#\u0001#\u0001#\u0003#\u02a0\b#\u0001$\u0001$\u0001$\u0005$\u02a5"+ - 
"\b$\n$\f$\u02a8\t$\u0001$\u0001$\u0001$\u0001$\u0001$\u0001$\u0005$\u02b0"+ - "\b$\n$\f$\u02b3\t$\u0001$\u0001$\u0001$\u0001$\u0001$\u0003$\u02ba\b$"+ - "\u0001$\u0003$\u02bd\b$\u0003$\u02bf\b$\u0001%\u0004%\u02c2\b%\u000b%"+ - "\f%\u02c3\u0001&\u0004&\u02c7\b&\u000b&\f&\u02c8\u0001&\u0001&\u0005&"+ - "\u02cd\b&\n&\f&\u02d0\t&\u0001&\u0001&\u0004&\u02d4\b&\u000b&\f&\u02d5"+ - "\u0001&\u0004&\u02d9\b&\u000b&\f&\u02da\u0001&\u0001&\u0005&\u02df\b&"+ - "\n&\f&\u02e2\t&\u0003&\u02e4\b&\u0001&\u0001&\u0001&\u0001&\u0004&\u02ea"+ - "\b&\u000b&\f&\u02eb\u0001&\u0001&\u0003&\u02f0\b&\u0001\'\u0001\'\u0001"+ - "\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001"+ - "*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ - "-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u0001"+ - "0\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ - "4\u00015\u00015\u00016\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ - "<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001"+ - "?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ - "C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001"+ - "G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001J\u0001J\u0001J\u0001"+ - "K\u0001K\u0001K\u0003K\u036f\bK\u0001K\u0005K\u0372\bK\nK\fK\u0375\tK"+ - "\u0001K\u0001K\u0004K\u0379\bK\u000bK\fK\u037a\u0003K\u037d\bK\u0001L"+ - "\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001"+ - "N\u0001N\u0005N\u038b\bN\nN\fN\u038e\tN\u0001N\u0001N\u0003N\u0392\bN"+ - "\u0001N\u0004N\u0395\bN\u000bN\fN\u0396\u0003N\u0399\bN\u0001O\u0001O"+ - "\u0004O\u039d\bO\u000bO\fO\u039e\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+ - 
"Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001"+ - "S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001"+ - "U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001"+ - "X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001"+ - "Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001"+ - "]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ - "_\u0001_\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0003"+ - "`\u03ec\b`\u0001a\u0004a\u03ef\ba\u000ba\fa\u03f0\u0001b\u0001b\u0001"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + 
"\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0004\u0014\u023b\b\u0014\u000b\u0014\f\u0014\u023c\u0001\u0014\u0001"+ + "\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u0245"+ + "\b\u0015\n\u0015\f\u0015\u0248\t\u0015\u0001\u0015\u0003\u0015\u024b\b"+ + "\u0015\u0001\u0015\u0003\u0015\u024e\b\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0257"+ + "\b\u0016\n\u0016\f\u0016\u025a\t\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0017\u0004\u0017\u0262\b\u0017\u000b\u0017"+ + "\f\u0017\u0263\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018"+ + "\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ + "\u0003\u001d\u0277\b\u001d\u0001\u001d\u0004\u001d\u027a\b\u001d\u000b"+ + "\u001d\f\u001d\u027b\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001"+ + " \u0001 \u0001 \u0003 \u0285\b \u0001!\u0001!\u0001\"\u0001\"\u0001\""+ + "\u0003\"\u028c\b\"\u0001#\u0001#\u0001#\u0005#\u0291\b#\n#\f#\u0294\t"+ + "#\u0001#\u0001#\u0001#\u0001#\u0001#\u0001#\u0005#\u029c\b#\n#\f#\u029f"+ + "\t#\u0001#\u0001#\u0001#\u0001#\u0001#\u0003#\u02a6\b#\u0001#\u0003#\u02a9"+ + "\b#\u0003#\u02ab\b#\u0001$\u0004$\u02ae\b$\u000b$\f$\u02af\u0001%\u0004"+ + "%\u02b3\b%\u000b%\f%\u02b4\u0001%\u0001%\u0005%\u02b9\b%\n%\f%\u02bc\t"+ + 
"%\u0001%\u0001%\u0004%\u02c0\b%\u000b%\f%\u02c1\u0001%\u0004%\u02c5\b"+ + "%\u000b%\f%\u02c6\u0001%\u0001%\u0005%\u02cb\b%\n%\f%\u02ce\t%\u0003%"+ + "\u02d0\b%\u0001%\u0001%\u0001%\u0001%\u0004%\u02d6\b%\u000b%\f%\u02d7"+ + "\u0001%\u0001%\u0003%\u02dc\b%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001"+ + "\'\u0001\'\u0001(\u0001(\u0001(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001"+ + "/\u0001/\u00010\u00010\u00010\u00011\u00011\u00011\u00012\u00012\u0001"+ + "2\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u0001"+ + "7\u00017\u00017\u00017\u00017\u00018\u00018\u00018\u00019\u00019\u0001"+ + ":\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001C\u0001"+ + "C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001"+ + "H\u0001H\u0001I\u0001I\u0001I\u0001I\u0001I\u0001J\u0001J\u0001J\u0003"+ + "J\u035b\bJ\u0001J\u0005J\u035e\bJ\nJ\fJ\u0361\tJ\u0001J\u0001J\u0004J"+ + "\u0365\bJ\u000bJ\fJ\u0366\u0003J\u0369\bJ\u0001K\u0001K\u0001K\u0001K"+ + "\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0005M\u0377"+ + "\bM\nM\fM\u037a\tM\u0001M\u0001M\u0003M\u037e\bM\u0001M\u0004M\u0381\b"+ + "M\u000bM\fM\u0382\u0003M\u0385\bM\u0001N\u0001N\u0004N\u0389\bN\u000b"+ + "N\fN\u038a\u0001N\u0001N\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001"+ + "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001"+ + "S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001"+ + "U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001"+ + "X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001"+ + 
"Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001"+ + "\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0003_\u03d8\b_\u0001`\u0004"+ + "`\u03db\b`\u000b`\f`\u03dc\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001"+ "b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001"+ - "e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001"+ + "e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001g\u0001"+ "g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001"+ - "i\u0001j\u0001j\u0001j\u0001j\u0003j\u0418\bj\u0001k\u0001k\u0003k\u041c"+ - "\bk\u0001k\u0005k\u041f\bk\nk\fk\u0422\tk\u0001k\u0001k\u0003k\u0426\b"+ - "k\u0001k\u0004k\u0429\bk\u000bk\fk\u042a\u0003k\u042d\bk\u0001l\u0001"+ - "l\u0004l\u0431\bl\u000bl\fl\u0432\u0001m\u0001m\u0001m\u0001m\u0001n\u0001"+ - "n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001"+ - "p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001"+ - "s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001"+ - "u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001"+ - "x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ - "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001~\u0004~\u047e\b~\u000b"+ - "~\f~\u047f\u0001~\u0001~\u0003~\u0484\b~\u0001~\u0004~\u0487\b~\u000b"+ - "~\f~\u0488\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ - "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ - 
"\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ - "\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b"+ - "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c"+ - "\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e"+ - "\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f"+ - "\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0004\u00a0"+ - "\u051f\b\u00a0\u000b\u00a0\f\u00a0\u0520\u0001\u00a1\u0001\u00a1\u0001"+ - "\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ - "\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001"+ - "\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ - "\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001"+ - "\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001"+ - 
"\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001"+ - "\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab\u0001\u00ab\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae\u0001\u00ae\u0001"+ - "\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ - "\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b8\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00bf\u0001"+ - "\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001"+ - "\u00c3\u0001\u00c3\u0002\u026c\u02b1\u0000\u00c4\u0010\u0001\u0012\u0002"+ - "\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b "+ - "\t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148"+ - "\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000J\u0000"+ - 
"L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d^\u001e"+ - "`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u0088"+ - "3\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c"+ - "=\u009e>\u00a0?\u00a2@\u00a4\u0000\u00a6A\u00a8B\u00aaC\u00acD\u00ae\u0000"+ - "\u00b0E\u00b2F\u00b4G\u00b6H\u00b8\u0000\u00ba\u0000\u00bcI\u00beJ\u00c0"+ - "K\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8\u0000\u00ca\u0000\u00cc\u0000"+ - "\u00ceL\u00d0\u0000\u00d2M\u00d4\u0000\u00d6\u0000\u00d8N\u00daO\u00dc"+ - "P\u00de\u0000\u00e0\u0000\u00e2\u0000\u00e4\u0000\u00e6\u0000\u00e8Q\u00ea"+ - "R\u00ecS\u00eeT\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6\u0000\u00f8"+ - "U\u00fa\u0000\u00fcV\u00feW\u0100X\u0102\u0000\u0104\u0000\u0106Y\u0108"+ - "Z\u010a\u0000\u010c[\u010e\u0000\u0110\\\u0112]\u0114^\u0116\u0000\u0118"+ - "\u0000\u011a\u0000\u011c\u0000\u011e\u0000\u0120\u0000\u0122\u0000\u0124"+ - "_\u0126`\u0128a\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132"+ - "b\u0134c\u0136d\u0138\u0000\u013ae\u013cf\u013eg\u0140h\u0142\u0000\u0144"+ - "i\u0146j\u0148k\u014al\u014c\u0000\u014em\u0150n\u0152o\u0154p\u0156q"+ - "\u0158\u0000\u015a\u0000\u015c\u0000\u015e\u0000\u0160\u0000\u0162\u0000"+ - "\u0164\u0000\u0166r\u0168s\u016at\u016c\u0000\u016e\u0000\u0170\u0000"+ - "\u0172\u0000\u0174u\u0176v\u0178w\u017a\u0000\u017c\u0000\u017e\u0000"+ - "\u0180x\u0182y\u0184z\u0186\u0000\u0188\u0000\u018a{\u018c|\u018e}\u0190"+ - "\u0000\u0192\u0000\u0194\u0000\u0196\u0000\u0010\u0000\u0001\u0002\u0003"+ - "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f#\u0002\u0000DDdd"+ - "\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ - "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ - "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ - "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ - "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r\r 
//[[]]\u0002"+ - "\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000"+ - "\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001"+ - "\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,/"+ - "/::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u05dd"+ - "\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000"+ - "\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000"+ - "\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000"+ - "\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000"+ - "\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000"+ - "$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001"+ - "\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000"+ - "\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u0000"+ - "2\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001"+ - "\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000"+ - "\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000"+ - "@\u0001\u0000\u0000\u0000\u0001B\u0001\u0000\u0000\u0000\u0001X\u0001"+ - "\u0000\u0000\u0000\u0001Z\u0001\u0000\u0000\u0000\u0001\\\u0001\u0000"+ - "\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000\u0000\u0000"+ - "\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000\u0001f"+ - "\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j\u0001\u0000"+ - "\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000\u0000\u0000"+ - "\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000\u0001t"+ - "\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x\u0001\u0000"+ - "\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000\u0000\u0000"+ - "\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000\u0000\u0001"+ - 
"\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000\u0000\u0001"+ - "\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000\u0000\u0001"+ - "\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000\u0000\u0001"+ - "\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000\u0000\u0001"+ - "\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000\u0000\u0001"+ - "\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000\u0000\u0001"+ - "\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000\u0000\u0001"+ - "\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000\u0000\u0001"+ - "\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000\u0000\u0001"+ - "\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001"+ - "\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000\u0000\u0001"+ - "\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001"+ - "\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0002"+ - "\u00b8\u0001\u0000\u0000\u0000\u0002\u00ba\u0001\u0000\u0000\u0000\u0002"+ - "\u00bc\u0001\u0000\u0000\u0000\u0002\u00be\u0001\u0000\u0000\u0000\u0002"+ - "\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003"+ - "\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003"+ - "\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003"+ - "\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000\u0000\u0003"+ - "\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003"+ - "\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003"+ - "\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000\u0000\u0004"+ - "\u00de\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0004"+ - "\u00e2\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004"+ - "\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000\u0000\u0004"+ - 
"\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005"+ - "\u00f2\u0001\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005"+ - "\u00f6\u0001\u0000\u0000\u0000\u0005\u00f8\u0001\u0000\u0000\u0000\u0005"+ - "\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000\u0000\u0005"+ - "\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000\u0000\u0006"+ - "\u0102\u0001\u0000\u0000\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006"+ - "\u0106\u0001\u0000\u0000\u0000\u0006\u0108\u0001\u0000\u0000\u0000\u0006"+ - "\u010c\u0001\u0000\u0000\u0000\u0006\u010e\u0001\u0000\u0000\u0000\u0006"+ - "\u0110\u0001\u0000\u0000\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006"+ - "\u0114\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007"+ - "\u0118\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007"+ - "\u011c\u0001\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007"+ - "\u0120\u0001\u0000\u0000\u0000\u0007\u0122\u0001\u0000\u0000\u0000\u0007"+ - "\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000\u0000\u0007"+ - "\u0128\u0001\u0000\u0000\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c"+ - "\u0001\u0000\u0000\u0000\b\u012e\u0001\u0000\u0000\u0000\b\u0130\u0001"+ - "\u0000\u0000\u0000\b\u0132\u0001\u0000\u0000\u0000\b\u0134\u0001\u0000"+ - "\u0000\u0000\b\u0136\u0001\u0000\u0000\u0000\t\u0138\u0001\u0000\u0000"+ - "\u0000\t\u013a\u0001\u0000\u0000\u0000\t\u013c\u0001\u0000\u0000\u0000"+ - "\t\u013e\u0001\u0000\u0000\u0000\t\u0140\u0001\u0000\u0000\u0000\n\u0142"+ - "\u0001\u0000\u0000\u0000\n\u0144\u0001\u0000\u0000\u0000\n\u0146\u0001"+ - "\u0000\u0000\u0000\n\u0148\u0001\u0000\u0000\u0000\n\u014a\u0001\u0000"+ - "\u0000\u0000\u000b\u014c\u0001\u0000\u0000\u0000\u000b\u014e\u0001\u0000"+ - "\u0000\u0000\u000b\u0150\u0001\u0000\u0000\u0000\u000b\u0152\u0001\u0000"+ - "\u0000\u0000\u000b\u0154\u0001\u0000\u0000\u0000\u000b\u0156\u0001\u0000"+ - 
"\u0000\u0000\f\u0158\u0001\u0000\u0000\u0000\f\u015a\u0001\u0000\u0000"+ - "\u0000\f\u015c\u0001\u0000\u0000\u0000\f\u015e\u0001\u0000\u0000\u0000"+ - "\f\u0160\u0001\u0000\u0000\u0000\f\u0162\u0001\u0000\u0000\u0000\f\u0164"+ - "\u0001\u0000\u0000\u0000\f\u0166\u0001\u0000\u0000\u0000\f\u0168\u0001"+ - "\u0000\u0000\u0000\f\u016a\u0001\u0000\u0000\u0000\r\u016c\u0001\u0000"+ - "\u0000\u0000\r\u016e\u0001\u0000\u0000\u0000\r\u0170\u0001\u0000\u0000"+ - "\u0000\r\u0172\u0001\u0000\u0000\u0000\r\u0174\u0001\u0000\u0000\u0000"+ - "\r\u0176\u0001\u0000\u0000\u0000\r\u0178\u0001\u0000\u0000\u0000\u000e"+ - "\u017a\u0001\u0000\u0000\u0000\u000e\u017c\u0001\u0000\u0000\u0000\u000e"+ - "\u017e\u0001\u0000\u0000\u0000\u000e\u0180\u0001\u0000\u0000\u0000\u000e"+ - "\u0182\u0001\u0000\u0000\u0000\u000e\u0184\u0001\u0000\u0000\u0000\u000f"+ - "\u0186\u0001\u0000\u0000\u0000\u000f\u0188\u0001\u0000\u0000\u0000\u000f"+ - "\u018a\u0001\u0000\u0000\u0000\u000f\u018c\u0001\u0000\u0000\u0000\u000f"+ - "\u018e\u0001\u0000\u0000\u0000\u000f\u0190\u0001\u0000\u0000\u0000\u000f"+ - "\u0192\u0001\u0000\u0000\u0000\u000f\u0194\u0001\u0000\u0000\u0000\u000f"+ - "\u0196\u0001\u0000\u0000\u0000\u0010\u0198\u0001\u0000\u0000\u0000\u0012"+ - "\u01a2\u0001\u0000\u0000\u0000\u0014\u01a9\u0001\u0000\u0000\u0000\u0016"+ - "\u01b2\u0001\u0000\u0000\u0000\u0018\u01b9\u0001\u0000\u0000\u0000\u001a"+ - "\u01c3\u0001\u0000\u0000\u0000\u001c\u01ca\u0001\u0000\u0000\u0000\u001e"+ - "\u01d1\u0001\u0000\u0000\u0000 \u01d8\u0001\u0000\u0000\u0000\"\u01e0"+ - "\u0001\u0000\u0000\u0000$\u01e7\u0001\u0000\u0000\u0000&\u01f3\u0001\u0000"+ - "\u0000\u0000(\u01fc\u0001\u0000\u0000\u0000*\u0202\u0001\u0000\u0000\u0000"+ - ",\u0209\u0001\u0000\u0000\u0000.\u0210\u0001\u0000\u0000\u00000\u0218"+ - "\u0001\u0000\u0000\u00002\u0220\u0001\u0000\u0000\u00004\u022f\u0001\u0000"+ - "\u0000\u00006\u0239\u0001\u0000\u0000\u00008\u0242\u0001\u0000\u0000\u0000"+ - 
":\u024e\u0001\u0000\u0000\u0000<\u0254\u0001\u0000\u0000\u0000>\u0265"+ - "\u0001\u0000\u0000\u0000@\u0275\u0001\u0000\u0000\u0000B\u027b\u0001\u0000"+ - "\u0000\u0000D\u027f\u0001\u0000\u0000\u0000F\u0281\u0001\u0000\u0000\u0000"+ - "H\u0283\u0001\u0000\u0000\u0000J\u0286\u0001\u0000\u0000\u0000L\u0288"+ - "\u0001\u0000\u0000\u0000N\u0291\u0001\u0000\u0000\u0000P\u0293\u0001\u0000"+ - "\u0000\u0000R\u0298\u0001\u0000\u0000\u0000T\u029a\u0001\u0000\u0000\u0000"+ - "V\u029f\u0001\u0000\u0000\u0000X\u02be\u0001\u0000\u0000\u0000Z\u02c1"+ - "\u0001\u0000\u0000\u0000\\\u02ef\u0001\u0000\u0000\u0000^\u02f1\u0001"+ - "\u0000\u0000\u0000`\u02f4\u0001\u0000\u0000\u0000b\u02f8\u0001\u0000\u0000"+ - "\u0000d\u02fc\u0001\u0000\u0000\u0000f\u02fe\u0001\u0000\u0000\u0000h"+ - "\u0301\u0001\u0000\u0000\u0000j\u0303\u0001\u0000\u0000\u0000l\u0308\u0001"+ - "\u0000\u0000\u0000n\u030a\u0001\u0000\u0000\u0000p\u0310\u0001\u0000\u0000"+ - "\u0000r\u0316\u0001\u0000\u0000\u0000t\u0319\u0001\u0000\u0000\u0000v"+ - "\u031c\u0001\u0000\u0000\u0000x\u0321\u0001\u0000\u0000\u0000z\u0326\u0001"+ - "\u0000\u0000\u0000|\u0328\u0001\u0000\u0000\u0000~\u032c\u0001\u0000\u0000"+ - "\u0000\u0080\u0331\u0001\u0000\u0000\u0000\u0082\u0337\u0001\u0000\u0000"+ - "\u0000\u0084\u033a\u0001\u0000\u0000\u0000\u0086\u033c\u0001\u0000\u0000"+ - "\u0000\u0088\u0342\u0001\u0000\u0000\u0000\u008a\u0344\u0001\u0000\u0000"+ - "\u0000\u008c\u0349\u0001\u0000\u0000\u0000\u008e\u034c\u0001\u0000\u0000"+ - "\u0000\u0090\u034f\u0001\u0000\u0000\u0000\u0092\u0352\u0001\u0000\u0000"+ - "\u0000\u0094\u0354\u0001\u0000\u0000\u0000\u0096\u0357\u0001\u0000\u0000"+ - "\u0000\u0098\u0359\u0001\u0000\u0000\u0000\u009a\u035c\u0001\u0000\u0000"+ - "\u0000\u009c\u035e\u0001\u0000\u0000\u0000\u009e\u0360\u0001\u0000\u0000"+ - "\u0000\u00a0\u0362\u0001\u0000\u0000\u0000\u00a2\u0364\u0001\u0000\u0000"+ - "\u0000\u00a4\u0366\u0001\u0000\u0000\u0000\u00a6\u037c\u0001\u0000\u0000"+ - 
"\u0000\u00a8\u037e\u0001\u0000\u0000\u0000\u00aa\u0383\u0001\u0000\u0000"+ - "\u0000\u00ac\u0398\u0001\u0000\u0000\u0000\u00ae\u039a\u0001\u0000\u0000"+ - "\u0000\u00b0\u03a2\u0001\u0000\u0000\u0000\u00b2\u03a4\u0001\u0000\u0000"+ - "\u0000\u00b4\u03a8\u0001\u0000\u0000\u0000\u00b6\u03ac\u0001\u0000\u0000"+ - "\u0000\u00b8\u03b0\u0001\u0000\u0000\u0000\u00ba\u03b5\u0001\u0000\u0000"+ - "\u0000\u00bc\u03ba\u0001\u0000\u0000\u0000\u00be\u03be\u0001\u0000\u0000"+ - "\u0000\u00c0\u03c2\u0001\u0000\u0000\u0000\u00c2\u03c6\u0001\u0000\u0000"+ - "\u0000\u00c4\u03cb\u0001\u0000\u0000\u0000\u00c6\u03cf\u0001\u0000\u0000"+ - "\u0000\u00c8\u03d3\u0001\u0000\u0000\u0000\u00ca\u03d7\u0001\u0000\u0000"+ - "\u0000\u00cc\u03db\u0001\u0000\u0000\u0000\u00ce\u03df\u0001\u0000\u0000"+ - "\u0000\u00d0\u03eb\u0001\u0000\u0000\u0000\u00d2\u03ee\u0001\u0000\u0000"+ - "\u0000\u00d4\u03f2\u0001\u0000\u0000\u0000\u00d6\u03f6\u0001\u0000\u0000"+ - "\u0000\u00d8\u03fa\u0001\u0000\u0000\u0000\u00da\u03fe\u0001\u0000\u0000"+ - "\u0000\u00dc\u0402\u0001\u0000\u0000\u0000\u00de\u0406\u0001\u0000\u0000"+ - "\u0000\u00e0\u040b\u0001\u0000\u0000\u0000\u00e2\u040f\u0001\u0000\u0000"+ - "\u0000\u00e4\u0417\u0001\u0000\u0000\u0000\u00e6\u042c\u0001\u0000\u0000"+ - "\u0000\u00e8\u0430\u0001\u0000\u0000\u0000\u00ea\u0434\u0001\u0000\u0000"+ - "\u0000\u00ec\u0438\u0001\u0000\u0000\u0000\u00ee\u043c\u0001\u0000\u0000"+ - "\u0000\u00f0\u0440\u0001\u0000\u0000\u0000\u00f2\u0445\u0001\u0000\u0000"+ - "\u0000\u00f4\u0449\u0001\u0000\u0000\u0000\u00f6\u044d\u0001\u0000\u0000"+ - "\u0000\u00f8\u0451\u0001\u0000\u0000\u0000\u00fa\u0454\u0001\u0000\u0000"+ - "\u0000\u00fc\u0458\u0001\u0000\u0000\u0000\u00fe\u045c\u0001\u0000\u0000"+ - "\u0000\u0100\u0460\u0001\u0000\u0000\u0000\u0102\u0464\u0001\u0000\u0000"+ - "\u0000\u0104\u0469\u0001\u0000\u0000\u0000\u0106\u046e\u0001\u0000\u0000"+ - "\u0000\u0108\u0473\u0001\u0000\u0000\u0000\u010a\u047a\u0001\u0000\u0000"+ - 
"\u0000\u010c\u0483\u0001\u0000\u0000\u0000\u010e\u048a\u0001\u0000\u0000"+ - "\u0000\u0110\u048e\u0001\u0000\u0000\u0000\u0112\u0492\u0001\u0000\u0000"+ - "\u0000\u0114\u0496\u0001\u0000\u0000\u0000\u0116\u049a\u0001\u0000\u0000"+ - "\u0000\u0118\u04a0\u0001\u0000\u0000\u0000\u011a\u04a4\u0001\u0000\u0000"+ - "\u0000\u011c\u04a8\u0001\u0000\u0000\u0000\u011e\u04ac\u0001\u0000\u0000"+ - "\u0000\u0120\u04b0\u0001\u0000\u0000\u0000\u0122\u04b4\u0001\u0000\u0000"+ - "\u0000\u0124\u04b8\u0001\u0000\u0000\u0000\u0126\u04bc\u0001\u0000\u0000"+ - "\u0000\u0128\u04c0\u0001\u0000\u0000\u0000\u012a\u04c4\u0001\u0000\u0000"+ - "\u0000\u012c\u04c9\u0001\u0000\u0000\u0000\u012e\u04cd\u0001\u0000\u0000"+ - "\u0000\u0130\u04d1\u0001\u0000\u0000\u0000\u0132\u04d5\u0001\u0000\u0000"+ - "\u0000\u0134\u04d9\u0001\u0000\u0000\u0000\u0136\u04dd\u0001\u0000\u0000"+ - "\u0000\u0138\u04e1\u0001\u0000\u0000\u0000\u013a\u04e6\u0001\u0000\u0000"+ - "\u0000\u013c\u04eb\u0001\u0000\u0000\u0000\u013e\u04ef\u0001\u0000\u0000"+ - "\u0000\u0140\u04f3\u0001\u0000\u0000\u0000\u0142\u04f7\u0001\u0000\u0000"+ - "\u0000\u0144\u04fc\u0001\u0000\u0000\u0000\u0146\u0506\u0001\u0000\u0000"+ - "\u0000\u0148\u050a\u0001\u0000\u0000\u0000\u014a\u050e\u0001\u0000\u0000"+ - "\u0000\u014c\u0512\u0001\u0000\u0000\u0000\u014e\u0517\u0001\u0000\u0000"+ - "\u0000\u0150\u051e\u0001\u0000\u0000\u0000\u0152\u0522\u0001\u0000\u0000"+ - "\u0000\u0154\u0526\u0001\u0000\u0000\u0000\u0156\u052a\u0001\u0000\u0000"+ - "\u0000\u0158\u052e\u0001\u0000\u0000\u0000\u015a\u0533\u0001\u0000\u0000"+ - "\u0000\u015c\u0537\u0001\u0000\u0000\u0000\u015e\u053b\u0001\u0000\u0000"+ - "\u0000\u0160\u053f\u0001\u0000\u0000\u0000\u0162\u0544\u0001\u0000\u0000"+ - "\u0000\u0164\u0548\u0001\u0000\u0000\u0000\u0166\u054c\u0001\u0000\u0000"+ - "\u0000\u0168\u0550\u0001\u0000\u0000\u0000\u016a\u0554\u0001\u0000\u0000"+ - "\u0000\u016c\u0558\u0001\u0000\u0000\u0000\u016e\u055e\u0001\u0000\u0000"+ - 
"\u0000\u0170\u0562\u0001\u0000\u0000\u0000\u0172\u0566\u0001\u0000\u0000"+ - "\u0000\u0174\u056a\u0001\u0000\u0000\u0000\u0176\u056e\u0001\u0000\u0000"+ - "\u0000\u0178\u0572\u0001\u0000\u0000\u0000\u017a\u0576\u0001\u0000\u0000"+ - "\u0000\u017c\u057b\u0001\u0000\u0000\u0000\u017e\u0581\u0001\u0000\u0000"+ - "\u0000\u0180\u0587\u0001\u0000\u0000\u0000\u0182\u058b\u0001\u0000\u0000"+ - "\u0000\u0184\u058f\u0001\u0000\u0000\u0000\u0186\u0593\u0001\u0000\u0000"+ - "\u0000\u0188\u0599\u0001\u0000\u0000\u0000\u018a\u059f\u0001\u0000\u0000"+ - "\u0000\u018c\u05a3\u0001\u0000\u0000\u0000\u018e\u05a7\u0001\u0000\u0000"+ - "\u0000\u0190\u05ab\u0001\u0000\u0000\u0000\u0192\u05b1\u0001\u0000\u0000"+ - "\u0000\u0194\u05b7\u0001\u0000\u0000\u0000\u0196\u05bd\u0001\u0000\u0000"+ - "\u0000\u0198\u0199\u0007\u0000\u0000\u0000\u0199\u019a\u0007\u0001\u0000"+ - "\u0000\u019a\u019b\u0007\u0002\u0000\u0000\u019b\u019c\u0007\u0002\u0000"+ - "\u0000\u019c\u019d\u0007\u0003\u0000\u0000\u019d\u019e\u0007\u0004\u0000"+ - "\u0000\u019e\u019f\u0007\u0005\u0000\u0000\u019f\u01a0\u0001\u0000\u0000"+ - "\u0000\u01a0\u01a1\u0006\u0000\u0000\u0000\u01a1\u0011\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0007\u0000\u0000\u0000\u01a3\u01a4\u0007\u0006\u0000"+ - "\u0000\u01a4\u01a5\u0007\u0007\u0000\u0000\u01a5\u01a6\u0007\b\u0000\u0000"+ - "\u01a6\u01a7\u0001\u0000\u0000\u0000\u01a7\u01a8\u0006\u0001\u0001\u0000"+ - "\u01a8\u0013\u0001\u0000\u0000\u0000\u01a9\u01aa\u0007\u0003\u0000\u0000"+ - "\u01aa\u01ab\u0007\t\u0000\u0000\u01ab\u01ac\u0007\u0006\u0000\u0000\u01ac"+ - "\u01ad\u0007\u0001\u0000\u0000\u01ad\u01ae\u0007\u0004\u0000\u0000\u01ae"+ - "\u01af\u0007\n\u0000\u0000\u01af\u01b0\u0001\u0000\u0000\u0000\u01b0\u01b1"+ - "\u0006\u0002\u0002\u0000\u01b1\u0015\u0001\u0000\u0000\u0000\u01b2\u01b3"+ - "\u0007\u0003\u0000\u0000\u01b3\u01b4\u0007\u000b\u0000\u0000\u01b4\u01b5"+ - "\u0007\f\u0000\u0000\u01b5\u01b6\u0007\r\u0000\u0000\u01b6\u01b7\u0001"+ - 
"\u0000\u0000\u0000\u01b7\u01b8\u0006\u0003\u0000\u0000\u01b8\u0017\u0001"+ - "\u0000\u0000\u0000\u01b9\u01ba\u0007\u0003\u0000\u0000\u01ba\u01bb\u0007"+ - "\u000e\u0000\u0000\u01bb\u01bc\u0007\b\u0000\u0000\u01bc\u01bd\u0007\r"+ - "\u0000\u0000\u01bd\u01be\u0007\f\u0000\u0000\u01be\u01bf\u0007\u0001\u0000"+ - "\u0000\u01bf\u01c0\u0007\t\u0000\u0000\u01c0\u01c1\u0001\u0000\u0000\u0000"+ - "\u01c1\u01c2\u0006\u0004\u0003\u0000\u01c2\u0019\u0001\u0000\u0000\u0000"+ - "\u01c3\u01c4\u0007\u000f\u0000\u0000\u01c4\u01c5\u0007\u0006\u0000\u0000"+ - "\u01c5\u01c6\u0007\u0007\u0000\u0000\u01c6\u01c7\u0007\u0010\u0000\u0000"+ - "\u01c7\u01c8\u0001\u0000\u0000\u0000\u01c8\u01c9\u0006\u0005\u0004\u0000"+ - "\u01c9\u001b\u0001\u0000\u0000\u0000\u01ca\u01cb\u0007\u0011\u0000\u0000"+ - "\u01cb\u01cc\u0007\u0006\u0000\u0000\u01cc\u01cd\u0007\u0007\u0000\u0000"+ - "\u01cd\u01ce\u0007\u0012\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000"+ - "\u01cf\u01d0\u0006\u0006\u0000\u0000\u01d0\u001d\u0001\u0000\u0000\u0000"+ - "\u01d1\u01d2\u0007\u0012\u0000\u0000\u01d2\u01d3\u0007\u0003\u0000\u0000"+ - "\u01d3\u01d4\u0007\u0003\u0000\u0000\u01d4\u01d5\u0007\b\u0000\u0000\u01d5"+ - "\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0007\u0001\u0000\u01d7"+ - "\u001f\u0001\u0000\u0000\u0000\u01d8\u01d9\u0007\r\u0000\u0000\u01d9\u01da"+ - "\u0007\u0001\u0000\u0000\u01da\u01db\u0007\u0010\u0000\u0000\u01db\u01dc"+ - "\u0007\u0001\u0000\u0000\u01dc\u01dd\u0007\u0005\u0000\u0000\u01dd\u01de"+ - "\u0001\u0000\u0000\u0000\u01de\u01df\u0006\b\u0000\u0000\u01df!\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0007\u0010\u0000\u0000\u01e1\u01e2\u0007"+ - "\u0003\u0000\u0000\u01e2\u01e3\u0007\u0005\u0000\u0000\u01e3\u01e4\u0007"+ - "\f\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e6\u0006\t"+ - "\u0005\u0000\u01e6#\u0001\u0000\u0000\u0000\u01e7\u01e8\u0007\u0010\u0000"+ - "\u0000\u01e8\u01e9\u0007\u000b\u0000\u0000\u01e9\u01ea\u0005_\u0000\u0000"+ - 
"\u01ea\u01eb\u0007\u0003\u0000\u0000\u01eb\u01ec\u0007\u000e\u0000\u0000"+ - "\u01ec\u01ed\u0007\b\u0000\u0000\u01ed\u01ee\u0007\f\u0000\u0000\u01ee"+ - "\u01ef\u0007\t\u0000\u0000\u01ef\u01f0\u0007\u0000\u0000\u0000\u01f0\u01f1"+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\n\u0006\u0000\u01f2%\u0001"+ - "\u0000\u0000\u0000\u01f3\u01f4\u0007\u0006\u0000\u0000\u01f4\u01f5\u0007"+ - "\u0003\u0000\u0000\u01f5\u01f6\u0007\t\u0000\u0000\u01f6\u01f7\u0007\f"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0010\u0000\u0000\u01f8\u01f9\u0007\u0003"+ - "\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000\u0000\u01fa\u01fb\u0006\u000b"+ - "\u0007\u0000\u01fb\'\u0001\u0000\u0000\u0000\u01fc\u01fd\u0007\u0006\u0000"+ - "\u0000\u01fd\u01fe\u0007\u0007\u0000\u0000\u01fe\u01ff\u0007\u0013\u0000"+ - "\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200\u0201\u0006\f\u0000\u0000"+ - "\u0201)\u0001\u0000\u0000\u0000\u0202\u0203\u0007\u0002\u0000\u0000\u0203"+ - "\u0204\u0007\n\u0000\u0000\u0204\u0205\u0007\u0007\u0000\u0000\u0205\u0206"+ - "\u0007\u0013\u0000\u0000\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208"+ - "\u0006\r\b\u0000\u0208+\u0001\u0000\u0000\u0000\u0209\u020a\u0007\u0002"+ - "\u0000\u0000\u020a\u020b\u0007\u0007\u0000\u0000\u020b\u020c\u0007\u0006"+ - "\u0000\u0000\u020c\u020d\u0007\u0005\u0000\u0000\u020d\u020e\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0006\u000e\u0000\u0000\u020f-\u0001\u0000\u0000"+ - "\u0000\u0210\u0211\u0007\u0002\u0000\u0000\u0211\u0212\u0007\u0005\u0000"+ - "\u0000\u0212\u0213\u0007\f\u0000\u0000\u0213\u0214\u0007\u0005\u0000\u0000"+ - "\u0214\u0215\u0007\u0002\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000"+ - "\u0216\u0217\u0006\u000f\u0000\u0000\u0217/\u0001\u0000\u0000\u0000\u0218"+ - "\u0219\u0007\u0013\u0000\u0000\u0219\u021a\u0007\n\u0000\u0000\u021a\u021b"+ - "\u0007\u0003\u0000\u0000\u021b\u021c\u0007\u0006\u0000\u0000\u021c\u021d"+ - "\u0007\u0003\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u021f"+ - 
"\u0006\u0010\u0000\u0000\u021f1\u0001\u0000\u0000\u0000\u0220\u0221\u0004"+ - "\u0011\u0000\u0000\u0221\u0222\u0007\u0001\u0000\u0000\u0222\u0223\u0007"+ - "\t\u0000\u0000\u0223\u0224\u0007\r\u0000\u0000\u0224\u0225\u0007\u0001"+ - "\u0000\u0000\u0225\u0226\u0007\t\u0000\u0000\u0226\u0227\u0007\u0003\u0000"+ - "\u0000\u0227\u0228\u0007\u0002\u0000\u0000\u0228\u0229\u0007\u0005\u0000"+ - "\u0000\u0229\u022a\u0007\f\u0000\u0000\u022a\u022b\u0007\u0005\u0000\u0000"+ - "\u022b\u022c\u0007\u0002\u0000\u0000\u022c\u022d\u0001\u0000\u0000\u0000"+ - "\u022d\u022e\u0006\u0011\u0000\u0000\u022e3\u0001\u0000\u0000\u0000\u022f"+ - "\u0230\u0004\u0012\u0001\u0000\u0230\u0231\u0007\r\u0000\u0000\u0231\u0232"+ - "\u0007\u0007\u0000\u0000\u0232\u0233\u0007\u0007\u0000\u0000\u0233\u0234"+ - "\u0007\u0012\u0000\u0000\u0234\u0235\u0007\u0014\u0000\u0000\u0235\u0236"+ - "\u0007\b\u0000\u0000\u0236\u0237\u0001\u0000\u0000\u0000\u0237\u0238\u0006"+ - "\u0012\t\u0000\u02385\u0001\u0000\u0000\u0000\u0239\u023a\u0004\u0013"+ - "\u0002\u0000\u023a\u023b\u0007\u0010\u0000\u0000\u023b\u023c\u0007\f\u0000"+ - "\u0000\u023c\u023d\u0007\u0005\u0000\u0000\u023d\u023e\u0007\u0004\u0000"+ - "\u0000\u023e\u023f\u0007\n\u0000\u0000\u023f\u0240\u0001\u0000\u0000\u0000"+ - "\u0240\u0241\u0006\u0013\u0000\u0000\u02417\u0001\u0000\u0000\u0000\u0242"+ - "\u0243\u0004\u0014\u0003\u0000\u0243\u0244\u0007\u0010\u0000\u0000\u0244"+ - "\u0245\u0007\u0003\u0000\u0000\u0245\u0246\u0007\u0005\u0000\u0000\u0246"+ - "\u0247\u0007\u0006\u0000\u0000\u0247\u0248\u0007\u0001\u0000\u0000\u0248"+ - "\u0249\u0007\u0004\u0000\u0000\u0249\u024a\u0007\u0002\u0000\u0000\u024a"+ - "\u024b\u0001\u0000\u0000\u0000\u024b\u024c\u0006\u0014\n\u0000\u024c9"+ - "\u0001\u0000\u0000\u0000\u024d\u024f\b\u0015\u0000\u0000\u024e\u024d\u0001"+ - "\u0000\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0251\u0252\u0001"+ - 
"\u0000\u0000\u0000\u0252\u0253\u0006\u0015\u0000\u0000\u0253;\u0001\u0000"+ - "\u0000\u0000\u0254\u0255\u0005/\u0000\u0000\u0255\u0256\u0005/\u0000\u0000"+ - "\u0256\u025a\u0001\u0000\u0000\u0000\u0257\u0259\b\u0016\u0000\u0000\u0258"+ - "\u0257\u0001\u0000\u0000\u0000\u0259\u025c\u0001\u0000\u0000\u0000\u025a"+ - "\u0258\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025d"+ - "\u025f\u0005\r\u0000\u0000\u025e\u025d\u0001\u0000\u0000\u0000\u025e\u025f"+ - "\u0001\u0000\u0000\u0000\u025f\u0261\u0001\u0000\u0000\u0000\u0260\u0262"+ - "\u0005\n\u0000\u0000\u0261\u0260\u0001\u0000\u0000\u0000\u0261\u0262\u0001"+ - "\u0000\u0000\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0006"+ - "\u0016\u000b\u0000\u0264=\u0001\u0000\u0000\u0000\u0265\u0266\u0005/\u0000"+ - "\u0000\u0266\u0267\u0005*\u0000\u0000\u0267\u026c\u0001\u0000\u0000\u0000"+ - "\u0268\u026b\u0003>\u0017\u0000\u0269\u026b\t\u0000\u0000\u0000\u026a"+ - "\u0268\u0001\u0000\u0000\u0000\u026a\u0269\u0001\u0000\u0000\u0000\u026b"+ - "\u026e\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026c"+ - "\u026a\u0001\u0000\u0000\u0000\u026d\u026f\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026f\u0270\u0005*\u0000\u0000\u0270\u0271"+ - "\u0005/\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0006"+ - "\u0017\u000b\u0000\u0273?\u0001\u0000\u0000\u0000\u0274\u0276\u0007\u0017"+ - "\u0000\u0000\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000"+ - "\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000"+ - "\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0006\u0018"+ - "\u000b\u0000\u027aA\u0001\u0000\u0000\u0000\u027b\u027c\u0005|\u0000\u0000"+ - "\u027c\u027d\u0001\u0000\u0000\u0000\u027d\u027e\u0006\u0019\f\u0000\u027e"+ - "C\u0001\u0000\u0000\u0000\u027f\u0280\u0007\u0018\u0000\u0000\u0280E\u0001"+ - 
"\u0000\u0000\u0000\u0281\u0282\u0007\u0019\u0000\u0000\u0282G\u0001\u0000"+ - "\u0000\u0000\u0283\u0284\u0005\\\u0000\u0000\u0284\u0285\u0007\u001a\u0000"+ - "\u0000\u0285I\u0001\u0000\u0000\u0000\u0286\u0287\b\u001b\u0000\u0000"+ - "\u0287K\u0001\u0000\u0000\u0000\u0288\u028a\u0007\u0003\u0000\u0000\u0289"+ - "\u028b\u0007\u001c\u0000\u0000\u028a\u0289\u0001\u0000\u0000\u0000\u028a"+ - "\u028b\u0001\u0000\u0000\u0000\u028b\u028d\u0001\u0000\u0000\u0000\u028c"+ - "\u028e\u0003D\u001a\u0000\u028d\u028c\u0001\u0000\u0000\u0000\u028e\u028f"+ - "\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000\u028f\u0290"+ - "\u0001\u0000\u0000\u0000\u0290M\u0001\u0000\u0000\u0000\u0291\u0292\u0005"+ - "@\u0000\u0000\u0292O\u0001\u0000\u0000\u0000\u0293\u0294\u0005`\u0000"+ - "\u0000\u0294Q\u0001\u0000\u0000\u0000\u0295\u0299\b\u001d\u0000\u0000"+ - "\u0296\u0297\u0005`\u0000\u0000\u0297\u0299\u0005`\u0000\u0000\u0298\u0295"+ - "\u0001\u0000\u0000\u0000\u0298\u0296\u0001\u0000\u0000\u0000\u0299S\u0001"+ - "\u0000\u0000\u0000\u029a\u029b\u0005_\u0000\u0000\u029bU\u0001\u0000\u0000"+ - "\u0000\u029c\u02a0\u0003F\u001b\u0000\u029d\u02a0\u0003D\u001a\u0000\u029e"+ - "\u02a0\u0003T\"\u0000\u029f\u029c\u0001\u0000\u0000\u0000\u029f\u029d"+ - "\u0001\u0000\u0000\u0000\u029f\u029e\u0001\u0000\u0000\u0000\u02a0W\u0001"+ - "\u0000\u0000\u0000\u02a1\u02a6\u0005\"\u0000\u0000\u02a2\u02a5\u0003H"+ - "\u001c\u0000\u02a3\u02a5\u0003J\u001d\u0000\u02a4\u02a2\u0001\u0000\u0000"+ - "\u0000\u02a4\u02a3\u0001\u0000\u0000\u0000\u02a5\u02a8\u0001\u0000\u0000"+ - "\u0000\u02a6\u02a4\u0001\u0000\u0000\u0000\u02a6\u02a7\u0001\u0000\u0000"+ - "\u0000\u02a7\u02a9\u0001\u0000\u0000\u0000\u02a8\u02a6\u0001\u0000\u0000"+ - "\u0000\u02a9\u02bf\u0005\"\u0000\u0000\u02aa\u02ab\u0005\"\u0000\u0000"+ - "\u02ab\u02ac\u0005\"\u0000\u0000\u02ac\u02ad\u0005\"\u0000\u0000\u02ad"+ - "\u02b1\u0001\u0000\u0000\u0000\u02ae\u02b0\b\u0016\u0000\u0000\u02af\u02ae"+ - 
"\u0001\u0000\u0000\u0000\u02b0\u02b3\u0001\u0000\u0000\u0000\u02b1\u02b2"+ - "\u0001\u0000\u0000\u0000\u02b1\u02af\u0001\u0000\u0000\u0000\u02b2\u02b4"+ - "\u0001\u0000\u0000\u0000\u02b3\u02b1\u0001\u0000\u0000\u0000\u02b4\u02b5"+ - "\u0005\"\u0000\u0000\u02b5\u02b6\u0005\"\u0000\u0000\u02b6\u02b7\u0005"+ - "\"\u0000\u0000\u02b7\u02b9\u0001\u0000\u0000\u0000\u02b8\u02ba\u0005\""+ - "\u0000\u0000\u02b9\u02b8\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000"+ - "\u0000\u0000\u02ba\u02bc\u0001\u0000\u0000\u0000\u02bb\u02bd\u0005\"\u0000"+ - "\u0000\u02bc\u02bb\u0001\u0000\u0000\u0000\u02bc\u02bd\u0001\u0000\u0000"+ - "\u0000\u02bd\u02bf\u0001\u0000\u0000\u0000\u02be\u02a1\u0001\u0000\u0000"+ - "\u0000\u02be\u02aa\u0001\u0000\u0000\u0000\u02bfY\u0001\u0000\u0000\u0000"+ - "\u02c0\u02c2\u0003D\u001a\u0000\u02c1\u02c0\u0001\u0000\u0000\u0000\u02c2"+ - "\u02c3\u0001\u0000\u0000\u0000\u02c3\u02c1\u0001\u0000\u0000\u0000\u02c3"+ - "\u02c4\u0001\u0000\u0000\u0000\u02c4[\u0001\u0000\u0000\u0000\u02c5\u02c7"+ - "\u0003D\u001a\u0000\u02c6\u02c5\u0001\u0000\u0000\u0000\u02c7\u02c8\u0001"+ - "\u0000\u0000\u0000\u02c8\u02c6\u0001\u0000\u0000\u0000\u02c8\u02c9\u0001"+ - "\u0000\u0000\u0000\u02c9\u02ca\u0001\u0000\u0000\u0000\u02ca\u02ce\u0003"+ - "l.\u0000\u02cb\u02cd\u0003D\u001a\u0000\u02cc\u02cb\u0001\u0000\u0000"+ - "\u0000\u02cd\u02d0\u0001\u0000\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000"+ - "\u0000\u02ce\u02cf\u0001\u0000\u0000\u0000\u02cf\u02f0\u0001\u0000\u0000"+ - "\u0000\u02d0\u02ce\u0001\u0000\u0000\u0000\u02d1\u02d3\u0003l.\u0000\u02d2"+ - "\u02d4\u0003D\u001a\u0000\u02d3\u02d2\u0001\u0000\u0000\u0000\u02d4\u02d5"+ - "\u0001\u0000\u0000\u0000\u02d5\u02d3\u0001\u0000\u0000\u0000\u02d5\u02d6"+ - "\u0001\u0000\u0000\u0000\u02d6\u02f0\u0001\u0000\u0000\u0000\u02d7\u02d9"+ - "\u0003D\u001a\u0000\u02d8\u02d7\u0001\u0000\u0000\u0000\u02d9\u02da\u0001"+ - "\u0000\u0000\u0000\u02da\u02d8\u0001\u0000\u0000\u0000\u02da\u02db\u0001"+ - 
"\u0000\u0000\u0000\u02db\u02e3\u0001\u0000\u0000\u0000\u02dc\u02e0\u0003"+ - "l.\u0000\u02dd\u02df\u0003D\u001a\u0000\u02de\u02dd\u0001\u0000\u0000"+ - "\u0000\u02df\u02e2\u0001\u0000\u0000\u0000\u02e0\u02de\u0001\u0000\u0000"+ - "\u0000\u02e0\u02e1\u0001\u0000\u0000\u0000\u02e1\u02e4\u0001\u0000\u0000"+ - "\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e3\u02dc\u0001\u0000\u0000"+ - "\u0000\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u02e5\u0001\u0000\u0000"+ - "\u0000\u02e5\u02e6\u0003L\u001e\u0000\u02e6\u02f0\u0001\u0000\u0000\u0000"+ - "\u02e7\u02e9\u0003l.\u0000\u02e8\u02ea\u0003D\u001a\u0000\u02e9\u02e8"+ - "\u0001\u0000\u0000\u0000\u02ea\u02eb\u0001\u0000\u0000\u0000\u02eb\u02e9"+ - "\u0001\u0000\u0000\u0000\u02eb\u02ec\u0001\u0000\u0000\u0000\u02ec\u02ed"+ - "\u0001\u0000\u0000\u0000\u02ed\u02ee\u0003L\u001e\u0000\u02ee\u02f0\u0001"+ - "\u0000\u0000\u0000\u02ef\u02c6\u0001\u0000\u0000\u0000\u02ef\u02d1\u0001"+ - "\u0000\u0000\u0000\u02ef\u02d8\u0001\u0000\u0000\u0000\u02ef\u02e7\u0001"+ - "\u0000\u0000\u0000\u02f0]\u0001\u0000\u0000\u0000\u02f1\u02f2\u0007\u001e"+ - "\u0000\u0000\u02f2\u02f3\u0007\u001f\u0000\u0000\u02f3_\u0001\u0000\u0000"+ - "\u0000\u02f4\u02f5\u0007\f\u0000\u0000\u02f5\u02f6\u0007\t\u0000\u0000"+ - "\u02f6\u02f7\u0007\u0000\u0000\u0000\u02f7a\u0001\u0000\u0000\u0000\u02f8"+ - "\u02f9\u0007\f\u0000\u0000\u02f9\u02fa\u0007\u0002\u0000\u0000\u02fa\u02fb"+ - "\u0007\u0004\u0000\u0000\u02fbc\u0001\u0000\u0000\u0000\u02fc\u02fd\u0005"+ - "=\u0000\u0000\u02fde\u0001\u0000\u0000\u0000\u02fe\u02ff\u0005:\u0000"+ - "\u0000\u02ff\u0300\u0005:\u0000\u0000\u0300g\u0001\u0000\u0000\u0000\u0301"+ - "\u0302\u0005,\u0000\u0000\u0302i\u0001\u0000\u0000\u0000\u0303\u0304\u0007"+ - "\u0000\u0000\u0000\u0304\u0305\u0007\u0003\u0000\u0000\u0305\u0306\u0007"+ - "\u0002\u0000\u0000\u0306\u0307\u0007\u0004\u0000\u0000\u0307k\u0001\u0000"+ - "\u0000\u0000\u0308\u0309\u0005.\u0000\u0000\u0309m\u0001\u0000\u0000\u0000"+ - 
"\u030a\u030b\u0007\u000f\u0000\u0000\u030b\u030c\u0007\f\u0000\u0000\u030c"+ - "\u030d\u0007\r\u0000\u0000\u030d\u030e\u0007\u0002\u0000\u0000\u030e\u030f"+ - "\u0007\u0003\u0000\u0000\u030fo\u0001\u0000\u0000\u0000\u0310\u0311\u0007"+ - "\u000f\u0000\u0000\u0311\u0312\u0007\u0001\u0000\u0000\u0312\u0313\u0007"+ - "\u0006\u0000\u0000\u0313\u0314\u0007\u0002\u0000\u0000\u0314\u0315\u0007"+ - "\u0005\u0000\u0000\u0315q\u0001\u0000\u0000\u0000\u0316\u0317\u0007\u0001"+ - "\u0000\u0000\u0317\u0318\u0007\t\u0000\u0000\u0318s\u0001\u0000\u0000"+ - "\u0000\u0319\u031a\u0007\u0001\u0000\u0000\u031a\u031b\u0007\u0002\u0000"+ - "\u0000\u031bu\u0001\u0000\u0000\u0000\u031c\u031d\u0007\r\u0000\u0000"+ - "\u031d\u031e\u0007\f\u0000\u0000\u031e\u031f\u0007\u0002\u0000\u0000\u031f"+ - "\u0320\u0007\u0005\u0000\u0000\u0320w\u0001\u0000\u0000\u0000\u0321\u0322"+ - "\u0007\r\u0000\u0000\u0322\u0323\u0007\u0001\u0000\u0000\u0323\u0324\u0007"+ - "\u0012\u0000\u0000\u0324\u0325\u0007\u0003\u0000\u0000\u0325y\u0001\u0000"+ - "\u0000\u0000\u0326\u0327\u0005(\u0000\u0000\u0327{\u0001\u0000\u0000\u0000"+ - "\u0328\u0329\u0007\t\u0000\u0000\u0329\u032a\u0007\u0007\u0000\u0000\u032a"+ - "\u032b\u0007\u0005\u0000\u0000\u032b}\u0001\u0000\u0000\u0000\u032c\u032d"+ - "\u0007\t\u0000\u0000\u032d\u032e\u0007\u0014\u0000\u0000\u032e\u032f\u0007"+ - "\r\u0000\u0000\u032f\u0330\u0007\r\u0000\u0000\u0330\u007f\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0007\t\u0000\u0000\u0332\u0333\u0007\u0014\u0000"+ - "\u0000\u0333\u0334\u0007\r\u0000\u0000\u0334\u0335\u0007\r\u0000\u0000"+ - "\u0335\u0336\u0007\u0002\u0000\u0000\u0336\u0081\u0001\u0000\u0000\u0000"+ - "\u0337\u0338\u0007\u0007\u0000\u0000\u0338\u0339\u0007\u0006\u0000\u0000"+ - "\u0339\u0083\u0001\u0000\u0000\u0000\u033a\u033b\u0005?\u0000\u0000\u033b"+ - "\u0085\u0001\u0000\u0000\u0000\u033c\u033d\u0007\u0006\u0000\u0000\u033d"+ - "\u033e\u0007\r\u0000\u0000\u033e\u033f\u0007\u0001\u0000\u0000\u033f\u0340"+ - 
"\u0007\u0012\u0000\u0000\u0340\u0341\u0007\u0003\u0000\u0000\u0341\u0087"+ - "\u0001\u0000\u0000\u0000\u0342\u0343\u0005)\u0000\u0000\u0343\u0089\u0001"+ - "\u0000\u0000\u0000\u0344\u0345\u0007\u0005\u0000\u0000\u0345\u0346\u0007"+ - "\u0006\u0000\u0000\u0346\u0347\u0007\u0014\u0000\u0000\u0347\u0348\u0007"+ - "\u0003\u0000\u0000\u0348\u008b\u0001\u0000\u0000\u0000\u0349\u034a\u0005"+ - "=\u0000\u0000\u034a\u034b\u0005=\u0000\u0000\u034b\u008d\u0001\u0000\u0000"+ - "\u0000\u034c\u034d\u0005=\u0000\u0000\u034d\u034e\u0005~\u0000\u0000\u034e"+ - "\u008f\u0001\u0000\u0000\u0000\u034f\u0350\u0005!\u0000\u0000\u0350\u0351"+ - "\u0005=\u0000\u0000\u0351\u0091\u0001\u0000\u0000\u0000\u0352\u0353\u0005"+ - "<\u0000\u0000\u0353\u0093\u0001\u0000\u0000\u0000\u0354\u0355\u0005<\u0000"+ - "\u0000\u0355\u0356\u0005=\u0000\u0000\u0356\u0095\u0001\u0000\u0000\u0000"+ - "\u0357\u0358\u0005>\u0000\u0000\u0358\u0097\u0001\u0000\u0000\u0000\u0359"+ - "\u035a\u0005>\u0000\u0000\u035a\u035b\u0005=\u0000\u0000\u035b\u0099\u0001"+ - "\u0000\u0000\u0000\u035c\u035d\u0005+\u0000\u0000\u035d\u009b\u0001\u0000"+ - "\u0000\u0000\u035e\u035f\u0005-\u0000\u0000\u035f\u009d\u0001\u0000\u0000"+ - "\u0000\u0360\u0361\u0005*\u0000\u0000\u0361\u009f\u0001\u0000\u0000\u0000"+ - "\u0362\u0363\u0005/\u0000\u0000\u0363\u00a1\u0001\u0000\u0000\u0000\u0364"+ - "\u0365\u0005%\u0000\u0000\u0365\u00a3\u0001\u0000\u0000\u0000\u0366\u0367"+ - "\u0004J\u0004\u0000\u0367\u0368\u00036\u0013\u0000\u0368\u0369\u0001\u0000"+ - "\u0000\u0000\u0369\u036a\u0006J\r\u0000\u036a\u00a5\u0001\u0000\u0000"+ - "\u0000\u036b\u036e\u0003\u0084:\u0000\u036c\u036f\u0003F\u001b\u0000\u036d"+ - "\u036f\u0003T\"\u0000\u036e\u036c\u0001\u0000\u0000\u0000\u036e\u036d"+ - "\u0001\u0000\u0000\u0000\u036f\u0373\u0001\u0000\u0000\u0000\u0370\u0372"+ - "\u0003V#\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u0375\u0001\u0000"+ - "\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373\u0374\u0001\u0000"+ - 
"\u0000\u0000\u0374\u037d\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000"+ - "\u0000\u0000\u0376\u0378\u0003\u0084:\u0000\u0377\u0379\u0003D\u001a\u0000"+ - "\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037a\u0001\u0000\u0000\u0000"+ - "\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001\u0000\u0000\u0000"+ - "\u037b\u037d\u0001\u0000\u0000\u0000\u037c\u036b\u0001\u0000\u0000\u0000"+ - "\u037c\u0376\u0001\u0000\u0000\u0000\u037d\u00a7\u0001\u0000\u0000\u0000"+ - "\u037e\u037f\u0005[\u0000\u0000\u037f\u0380\u0001\u0000\u0000\u0000\u0380"+ - "\u0381\u0006L\u0000\u0000\u0381\u0382\u0006L\u0000\u0000\u0382\u00a9\u0001"+ - "\u0000\u0000\u0000\u0383\u0384\u0005]\u0000\u0000\u0384\u0385\u0001\u0000"+ - "\u0000\u0000\u0385\u0386\u0006M\f\u0000\u0386\u0387\u0006M\f\u0000\u0387"+ - "\u00ab\u0001\u0000\u0000\u0000\u0388\u038c\u0003F\u001b\u0000\u0389\u038b"+ - "\u0003V#\u0000\u038a\u0389\u0001\u0000\u0000\u0000\u038b\u038e\u0001\u0000"+ - "\u0000\u0000\u038c\u038a\u0001\u0000\u0000\u0000\u038c\u038d\u0001\u0000"+ - "\u0000\u0000\u038d\u0399\u0001\u0000\u0000\u0000\u038e\u038c\u0001\u0000"+ - "\u0000\u0000\u038f\u0392\u0003T\"\u0000\u0390\u0392\u0003N\u001f\u0000"+ - "\u0391\u038f\u0001\u0000\u0000\u0000\u0391\u0390\u0001\u0000\u0000\u0000"+ - "\u0392\u0394\u0001\u0000\u0000\u0000\u0393\u0395\u0003V#\u0000\u0394\u0393"+ - "\u0001\u0000\u0000\u0000\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0394"+ - "\u0001\u0000\u0000\u0000\u0396\u0397\u0001\u0000\u0000\u0000\u0397\u0399"+ - "\u0001\u0000\u0000\u0000\u0398\u0388\u0001\u0000\u0000\u0000\u0398\u0391"+ - "\u0001\u0000\u0000\u0000\u0399\u00ad\u0001\u0000\u0000\u0000\u039a\u039c"+ - "\u0003P \u0000\u039b\u039d\u0003R!\u0000\u039c\u039b\u0001\u0000\u0000"+ - "\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e\u039c\u0001\u0000\u0000"+ - "\u0000\u039e\u039f\u0001\u0000\u0000\u0000\u039f\u03a0\u0001\u0000\u0000"+ - "\u0000\u03a0\u03a1\u0003P \u0000\u03a1\u00af\u0001\u0000\u0000\u0000\u03a2"+ - 
"\u03a3\u0003\u00aeO\u0000\u03a3\u00b1\u0001\u0000\u0000\u0000\u03a4\u03a5"+ - "\u0003<\u0016\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7\u0006"+ - "Q\u000b\u0000\u03a7\u00b3\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003>\u0017"+ - "\u0000\u03a9\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006R\u000b\u0000"+ - "\u03ab\u00b5\u0001\u0000\u0000\u0000\u03ac\u03ad\u0003@\u0018\u0000\u03ad"+ - "\u03ae\u0001\u0000\u0000\u0000\u03ae\u03af\u0006S\u000b\u0000\u03af\u00b7"+ - "\u0001\u0000\u0000\u0000\u03b0\u03b1\u0003\u00a8L\u0000\u03b1\u03b2\u0001"+ - "\u0000\u0000\u0000\u03b2\u03b3\u0006T\u000e\u0000\u03b3\u03b4\u0006T\u000f"+ - "\u0000\u03b4\u00b9\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003B\u0019\u0000"+ - "\u03b6\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006U\u0010\u0000\u03b8"+ - "\u03b9\u0006U\f\u0000\u03b9\u00bb\u0001\u0000\u0000\u0000\u03ba\u03bb"+ - "\u0003@\u0018\u0000\u03bb\u03bc\u0001\u0000\u0000\u0000\u03bc\u03bd\u0006"+ - "V\u000b\u0000\u03bd\u00bd\u0001\u0000\u0000\u0000\u03be\u03bf\u0003<\u0016"+ - "\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000\u03c0\u03c1\u0006W\u000b\u0000"+ - "\u03c1\u00bf\u0001\u0000\u0000\u0000\u03c2\u03c3\u0003>\u0017\u0000\u03c3"+ - "\u03c4\u0001\u0000\u0000\u0000\u03c4\u03c5\u0006X\u000b\u0000\u03c5\u00c1"+ - "\u0001\u0000\u0000\u0000\u03c6\u03c7\u0003B\u0019\u0000\u03c7\u03c8\u0001"+ - "\u0000\u0000\u0000\u03c8\u03c9\u0006Y\u0010\u0000\u03c9\u03ca\u0006Y\f"+ - "\u0000\u03ca\u00c3\u0001\u0000\u0000\u0000\u03cb\u03cc\u0003\u00a8L\u0000"+ - "\u03cc\u03cd\u0001\u0000\u0000\u0000\u03cd\u03ce\u0006Z\u000e\u0000\u03ce"+ - "\u00c5\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u00aaM\u0000\u03d0\u03d1"+ - "\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006[\u0011\u0000\u03d2\u00c7\u0001"+ - "\u0000\u0000\u0000\u03d3\u03d4\u0003\u014e\u009f\u0000\u03d4\u03d5\u0001"+ - "\u0000\u0000\u0000\u03d5\u03d6\u0006\\\u0012\u0000\u03d6\u00c9\u0001\u0000"+ - "\u0000\u0000\u03d7\u03d8\u0003h,\u0000\u03d8\u03d9\u0001\u0000\u0000\u0000"+ - 
"\u03d9\u03da\u0006]\u0013\u0000\u03da\u00cb\u0001\u0000\u0000\u0000\u03db"+ - "\u03dc\u0003d*\u0000\u03dc\u03dd\u0001\u0000\u0000\u0000\u03dd\u03de\u0006"+ - "^\u0014\u0000\u03de\u00cd\u0001\u0000\u0000\u0000\u03df\u03e0\u0007\u0010"+ - "\u0000\u0000\u03e0\u03e1\u0007\u0003\u0000\u0000\u03e1\u03e2\u0007\u0005"+ - "\u0000\u0000\u03e2\u03e3\u0007\f\u0000\u0000\u03e3\u03e4\u0007\u0000\u0000"+ - "\u0000\u03e4\u03e5\u0007\f\u0000\u0000\u03e5\u03e6\u0007\u0005\u0000\u0000"+ - "\u03e6\u03e7\u0007\f\u0000\u0000\u03e7\u00cf\u0001\u0000\u0000\u0000\u03e8"+ - "\u03ec\b \u0000\u0000\u03e9\u03ea\u0005/\u0000\u0000\u03ea\u03ec\b!\u0000"+ - "\u0000\u03eb\u03e8\u0001\u0000\u0000\u0000\u03eb\u03e9\u0001\u0000\u0000"+ - "\u0000\u03ec\u00d1\u0001\u0000\u0000\u0000\u03ed\u03ef\u0003\u00d0`\u0000"+ - "\u03ee\u03ed\u0001\u0000\u0000\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000"+ - "\u03f0\u03ee\u0001\u0000\u0000\u0000\u03f0\u03f1\u0001\u0000\u0000\u0000"+ - "\u03f1\u00d3\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003\u00d2a\u0000\u03f3"+ - "\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006b\u0015\u0000\u03f5\u00d5"+ - "\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003X$\u0000\u03f7\u03f8\u0001\u0000"+ - "\u0000\u0000\u03f8\u03f9\u0006c\u0016\u0000\u03f9\u00d7\u0001\u0000\u0000"+ - "\u0000\u03fa\u03fb\u0003<\u0016\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000"+ - "\u03fc\u03fd\u0006d\u000b\u0000\u03fd\u00d9\u0001\u0000\u0000\u0000\u03fe"+ - "\u03ff\u0003>\u0017\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401"+ - "\u0006e\u000b\u0000\u0401\u00db\u0001\u0000\u0000\u0000\u0402\u0403\u0003"+ - "@\u0018\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006f\u000b"+ - "\u0000\u0405\u00dd\u0001\u0000\u0000\u0000\u0406\u0407\u0003B\u0019\u0000"+ - "\u0407\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006g\u0010\u0000\u0409"+ - "\u040a\u0006g\f\u0000\u040a\u00df\u0001\u0000\u0000\u0000\u040b\u040c"+ - "\u0003l.\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006h"+ - 
"\u0017\u0000\u040e\u00e1\u0001\u0000\u0000\u0000\u040f\u0410\u0003h,\u0000"+ - "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006i\u0013\u0000\u0412"+ - "\u00e3\u0001\u0000\u0000\u0000\u0413\u0418\u0003F\u001b\u0000\u0414\u0418"+ - "\u0003D\u001a\u0000\u0415\u0418\u0003T\"\u0000\u0416\u0418\u0003\u009e"+ - "G\u0000\u0417\u0413\u0001\u0000\u0000\u0000\u0417\u0414\u0001\u0000\u0000"+ - "\u0000\u0417\u0415\u0001\u0000\u0000\u0000\u0417\u0416\u0001\u0000\u0000"+ - "\u0000\u0418\u00e5\u0001\u0000\u0000\u0000\u0419\u041c\u0003F\u001b\u0000"+ - "\u041a\u041c\u0003\u009eG\u0000\u041b\u0419\u0001\u0000\u0000\u0000\u041b"+ - "\u041a\u0001\u0000\u0000\u0000\u041c\u0420\u0001\u0000\u0000\u0000\u041d"+ - "\u041f\u0003\u00e4j\u0000\u041e\u041d\u0001\u0000\u0000\u0000\u041f\u0422"+ - "\u0001\u0000\u0000\u0000\u0420\u041e\u0001\u0000\u0000\u0000\u0420\u0421"+ - "\u0001\u0000\u0000\u0000\u0421\u042d\u0001\u0000\u0000\u0000\u0422\u0420"+ - "\u0001\u0000\u0000\u0000\u0423\u0426\u0003T\"\u0000\u0424\u0426\u0003"+ - "N\u001f\u0000\u0425\u0423\u0001\u0000\u0000\u0000\u0425\u0424\u0001\u0000"+ - "\u0000\u0000\u0426\u0428\u0001\u0000\u0000\u0000\u0427\u0429\u0003\u00e4"+ - "j\u0000\u0428\u0427\u0001\u0000\u0000\u0000\u0429\u042a\u0001\u0000\u0000"+ - "\u0000\u042a\u0428\u0001\u0000\u0000\u0000\u042a\u042b\u0001\u0000\u0000"+ - "\u0000\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u041b\u0001\u0000\u0000"+ - "\u0000\u042c\u0425\u0001\u0000\u0000\u0000\u042d\u00e7\u0001\u0000\u0000"+ - "\u0000\u042e\u0431\u0003\u00e6k\u0000\u042f\u0431\u0003\u00aeO\u0000\u0430"+ - "\u042e\u0001\u0000\u0000\u0000\u0430\u042f\u0001\u0000\u0000\u0000\u0431"+ - "\u0432\u0001\u0000\u0000\u0000\u0432\u0430\u0001\u0000\u0000\u0000\u0432"+ - "\u0433\u0001\u0000\u0000\u0000\u0433\u00e9\u0001\u0000\u0000\u0000\u0434"+ - "\u0435\u0003<\u0016\u0000\u0435\u0436\u0001\u0000\u0000\u0000\u0436\u0437"+ - "\u0006m\u000b\u0000\u0437\u00eb\u0001\u0000\u0000\u0000\u0438\u0439\u0003"+ - 
">\u0017\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a\u043b\u0006n\u000b"+ - "\u0000\u043b\u00ed\u0001\u0000\u0000\u0000\u043c\u043d\u0003@\u0018\u0000"+ - "\u043d\u043e\u0001\u0000\u0000\u0000\u043e\u043f\u0006o\u000b\u0000\u043f"+ - "\u00ef\u0001\u0000\u0000\u0000\u0440\u0441\u0003B\u0019\u0000\u0441\u0442"+ - "\u0001\u0000\u0000\u0000\u0442\u0443\u0006p\u0010\u0000\u0443\u0444\u0006"+ - "p\f\u0000\u0444\u00f1\u0001\u0000\u0000\u0000\u0445\u0446\u0003d*\u0000"+ - "\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006q\u0014\u0000\u0448"+ - "\u00f3\u0001\u0000\u0000\u0000\u0449\u044a\u0003h,\u0000\u044a\u044b\u0001"+ - "\u0000\u0000\u0000\u044b\u044c\u0006r\u0013\u0000\u044c\u00f5\u0001\u0000"+ - "\u0000\u0000\u044d\u044e\u0003l.\u0000\u044e\u044f\u0001\u0000\u0000\u0000"+ - "\u044f\u0450\u0006s\u0017\u0000\u0450\u00f7\u0001\u0000\u0000\u0000\u0451"+ - "\u0452\u0007\f\u0000\u0000\u0452\u0453\u0007\u0002\u0000\u0000\u0453\u00f9"+ - "\u0001\u0000\u0000\u0000\u0454\u0455\u0003\u00e8l\u0000\u0455\u0456\u0001"+ - "\u0000\u0000\u0000\u0456\u0457\u0006u\u0018\u0000\u0457\u00fb\u0001\u0000"+ - "\u0000\u0000\u0458\u0459\u0003<\u0016\u0000\u0459\u045a\u0001\u0000\u0000"+ - "\u0000\u045a\u045b\u0006v\u000b\u0000\u045b\u00fd\u0001\u0000\u0000\u0000"+ - "\u045c\u045d\u0003>\u0017\u0000\u045d\u045e\u0001\u0000\u0000\u0000\u045e"+ - "\u045f\u0006w\u000b\u0000\u045f\u00ff\u0001\u0000\u0000\u0000\u0460\u0461"+ - "\u0003@\u0018\u0000\u0461\u0462\u0001\u0000\u0000\u0000\u0462\u0463\u0006"+ - "x\u000b\u0000\u0463\u0101\u0001\u0000\u0000\u0000\u0464\u0465\u0003B\u0019"+ - "\u0000\u0465\u0466\u0001\u0000\u0000\u0000\u0466\u0467\u0006y\u0010\u0000"+ - "\u0467\u0468\u0006y\f\u0000\u0468\u0103\u0001\u0000\u0000\u0000\u0469"+ - "\u046a\u0003\u00a8L\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b\u046c"+ - "\u0006z\u000e\u0000\u046c\u046d\u0006z\u0019\u0000\u046d\u0105\u0001\u0000"+ - "\u0000\u0000\u046e\u046f\u0007\u0007\u0000\u0000\u046f\u0470\u0007\t\u0000"+ - 
"\u0000\u0470\u0471\u0001\u0000\u0000\u0000\u0471\u0472\u0006{\u001a\u0000"+ - "\u0472\u0107\u0001\u0000\u0000\u0000\u0473\u0474\u0007\u0013\u0000\u0000"+ - "\u0474\u0475\u0007\u0001\u0000\u0000\u0475\u0476\u0007\u0005\u0000\u0000"+ - "\u0476\u0477\u0007\n\u0000\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478"+ - "\u0479\u0006|\u001a\u0000\u0479\u0109\u0001\u0000\u0000\u0000\u047a\u047b"+ - "\b\"\u0000\u0000\u047b\u010b\u0001\u0000\u0000\u0000\u047c\u047e\u0003"+ - "\u010a}\u0000\u047d\u047c\u0001\u0000\u0000\u0000\u047e\u047f\u0001\u0000"+ - "\u0000\u0000\u047f\u047d\u0001\u0000\u0000\u0000\u047f\u0480\u0001\u0000"+ - "\u0000\u0000\u0480\u0481\u0001\u0000\u0000\u0000\u0481\u0482\u0003\u014e"+ - "\u009f\u0000\u0482\u0484\u0001\u0000\u0000\u0000\u0483\u047d\u0001\u0000"+ - "\u0000\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0486\u0001\u0000"+ - "\u0000\u0000\u0485\u0487\u0003\u010a}\u0000\u0486\u0485\u0001\u0000\u0000"+ - "\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0486\u0001\u0000\u0000"+ - "\u0000\u0488\u0489\u0001\u0000\u0000\u0000\u0489\u010d\u0001\u0000\u0000"+ - "\u0000\u048a\u048b\u0003\u010c~\u0000\u048b\u048c\u0001\u0000\u0000\u0000"+ - "\u048c\u048d\u0006\u007f\u001b\u0000\u048d\u010f\u0001\u0000\u0000\u0000"+ - "\u048e\u048f\u0003<\u0016\u0000\u048f\u0490\u0001\u0000\u0000\u0000\u0490"+ - "\u0491\u0006\u0080\u000b\u0000\u0491\u0111\u0001\u0000\u0000\u0000\u0492"+ - "\u0493\u0003>\u0017\u0000\u0493\u0494\u0001\u0000\u0000\u0000\u0494\u0495"+ - "\u0006\u0081\u000b\u0000\u0495\u0113\u0001\u0000\u0000\u0000\u0496\u0497"+ - "\u0003@\u0018\u0000\u0497\u0498\u0001\u0000\u0000\u0000\u0498\u0499\u0006"+ - "\u0082\u000b\u0000\u0499\u0115\u0001\u0000\u0000\u0000\u049a\u049b\u0003"+ - "B\u0019\u0000\u049b\u049c\u0001\u0000\u0000\u0000\u049c\u049d\u0006\u0083"+ - "\u0010\u0000\u049d\u049e\u0006\u0083\f\u0000\u049e\u049f\u0006\u0083\f"+ - "\u0000\u049f\u0117\u0001\u0000\u0000\u0000\u04a0\u04a1\u0003d*\u0000\u04a1"+ - 
"\u04a2\u0001\u0000\u0000\u0000\u04a2\u04a3\u0006\u0084\u0014\u0000\u04a3"+ - "\u0119\u0001\u0000\u0000\u0000\u04a4\u04a5\u0003h,\u0000\u04a5\u04a6\u0001"+ - "\u0000\u0000\u0000\u04a6\u04a7\u0006\u0085\u0013\u0000\u04a7\u011b\u0001"+ - "\u0000\u0000\u0000\u04a8\u04a9\u0003l.\u0000\u04a9\u04aa\u0001\u0000\u0000"+ - "\u0000\u04aa\u04ab\u0006\u0086\u0017\u0000\u04ab\u011d\u0001\u0000\u0000"+ - "\u0000\u04ac\u04ad\u0003\u0108|\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000"+ - "\u04ae\u04af\u0006\u0087\u001c\u0000\u04af\u011f\u0001\u0000\u0000\u0000"+ - "\u04b0\u04b1\u0003\u00e8l\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000\u04b2"+ - "\u04b3\u0006\u0088\u0018\u0000\u04b3\u0121\u0001\u0000\u0000\u0000\u04b4"+ - "\u04b5\u0003\u00b0P\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6\u04b7"+ - "\u0006\u0089\u001d\u0000\u04b7\u0123\u0001\u0000\u0000\u0000\u04b8\u04b9"+ - "\u0003<\u0016\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb\u0006"+ - "\u008a\u000b\u0000\u04bb\u0125\u0001\u0000\u0000\u0000\u04bc\u04bd\u0003"+ - ">\u0017\u0000\u04bd\u04be\u0001\u0000\u0000\u0000\u04be\u04bf\u0006\u008b"+ - "\u000b\u0000\u04bf\u0127\u0001\u0000\u0000\u0000\u04c0\u04c1\u0003@\u0018"+ - "\u0000\u04c1\u04c2\u0001\u0000\u0000\u0000\u04c2\u04c3\u0006\u008c\u000b"+ - "\u0000\u04c3\u0129\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003B\u0019\u0000"+ - "\u04c5\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u008d\u0010\u0000"+ - "\u04c7\u04c8\u0006\u008d\f\u0000\u04c8\u012b\u0001\u0000\u0000\u0000\u04c9"+ - "\u04ca\u0003l.\u0000\u04ca\u04cb\u0001\u0000\u0000\u0000\u04cb\u04cc\u0006"+ - "\u008e\u0017\u0000\u04cc\u012d\u0001\u0000\u0000\u0000\u04cd\u04ce\u0003"+ - "\u00b0P\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000\u04cf\u04d0\u0006\u008f"+ - "\u001d\u0000\u04d0\u012f\u0001\u0000\u0000\u0000\u04d1\u04d2\u0003\u00ac"+ - "N\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006\u0090\u001e"+ - "\u0000\u04d4\u0131\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003<\u0016\u0000"+ - 
"\u04d6\u04d7\u0001\u0000\u0000\u0000\u04d7\u04d8\u0006\u0091\u000b\u0000"+ - "\u04d8\u0133\u0001\u0000\u0000\u0000\u04d9\u04da\u0003>\u0017\u0000\u04da"+ - "\u04db\u0001\u0000\u0000\u0000\u04db\u04dc\u0006\u0092\u000b\u0000\u04dc"+ - "\u0135\u0001\u0000\u0000\u0000\u04dd\u04de\u0003@\u0018\u0000\u04de\u04df"+ - "\u0001\u0000\u0000\u0000\u04df\u04e0\u0006\u0093\u000b\u0000\u04e0\u0137"+ - "\u0001\u0000\u0000\u0000\u04e1\u04e2\u0003B\u0019\u0000\u04e2\u04e3\u0001"+ - "\u0000\u0000\u0000\u04e3\u04e4\u0006\u0094\u0010\u0000\u04e4\u04e5\u0006"+ - "\u0094\f\u0000\u04e5\u0139\u0001\u0000\u0000\u0000\u04e6\u04e7\u0007\u0001"+ - "\u0000\u0000\u04e7\u04e8\u0007\t\u0000\u0000\u04e8\u04e9\u0007\u000f\u0000"+ - "\u0000\u04e9\u04ea\u0007\u0007\u0000\u0000\u04ea\u013b\u0001\u0000\u0000"+ - "\u0000\u04eb\u04ec\u0003<\u0016\u0000\u04ec\u04ed\u0001\u0000\u0000\u0000"+ - "\u04ed\u04ee\u0006\u0096\u000b\u0000\u04ee\u013d\u0001\u0000\u0000\u0000"+ - "\u04ef\u04f0\u0003>\u0017\u0000\u04f0\u04f1\u0001\u0000\u0000\u0000\u04f1"+ - "\u04f2\u0006\u0097\u000b\u0000\u04f2\u013f\u0001\u0000\u0000\u0000\u04f3"+ - "\u04f4\u0003@\u0018\u0000\u04f4\u04f5\u0001\u0000\u0000\u0000\u04f5\u04f6"+ - "\u0006\u0098\u000b\u0000\u04f6\u0141\u0001\u0000\u0000\u0000\u04f7\u04f8"+ - "\u0003B\u0019\u0000\u04f8\u04f9\u0001\u0000\u0000\u0000\u04f9\u04fa\u0006"+ - "\u0099\u0010\u0000\u04fa\u04fb\u0006\u0099\f\u0000\u04fb\u0143\u0001\u0000"+ - "\u0000\u0000\u04fc\u04fd\u0007\u000f\u0000\u0000\u04fd\u04fe\u0007\u0014"+ - "\u0000\u0000\u04fe\u04ff\u0007\t\u0000\u0000\u04ff\u0500\u0007\u0004\u0000"+ - "\u0000\u0500\u0501\u0007\u0005\u0000\u0000\u0501\u0502\u0007\u0001\u0000"+ - "\u0000\u0502\u0503\u0007\u0007\u0000\u0000\u0503\u0504\u0007\t\u0000\u0000"+ - "\u0504\u0505\u0007\u0002\u0000\u0000\u0505\u0145\u0001\u0000\u0000\u0000"+ - "\u0506\u0507\u0003<\u0016\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508"+ - "\u0509\u0006\u009b\u000b\u0000\u0509\u0147\u0001\u0000\u0000\u0000\u050a"+ - 
"\u050b\u0003>\u0017\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c\u050d"+ - "\u0006\u009c\u000b\u0000\u050d\u0149\u0001\u0000\u0000\u0000\u050e\u050f"+ - "\u0003@\u0018\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511\u0006"+ - "\u009d\u000b\u0000\u0511\u014b\u0001\u0000\u0000\u0000\u0512\u0513\u0003"+ - "\u00aaM\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u009e"+ - "\u0011\u0000\u0515\u0516\u0006\u009e\f\u0000\u0516\u014d\u0001\u0000\u0000"+ - "\u0000\u0517\u0518\u0005:\u0000\u0000\u0518\u014f\u0001\u0000\u0000\u0000"+ - "\u0519\u051f\u0003N\u001f\u0000\u051a\u051f\u0003D\u001a\u0000\u051b\u051f"+ - "\u0003l.\u0000\u051c\u051f\u0003F\u001b\u0000\u051d\u051f\u0003T\"\u0000"+ - "\u051e\u0519\u0001\u0000\u0000\u0000\u051e\u051a\u0001\u0000\u0000\u0000"+ - "\u051e\u051b\u0001\u0000\u0000\u0000\u051e\u051c\u0001\u0000\u0000\u0000"+ - "\u051e\u051d\u0001\u0000\u0000\u0000\u051f\u0520\u0001\u0000\u0000\u0000"+ - "\u0520\u051e\u0001\u0000\u0000\u0000\u0520\u0521\u0001\u0000\u0000\u0000"+ - "\u0521\u0151\u0001\u0000\u0000\u0000\u0522\u0523\u0003<\u0016\u0000\u0523"+ - "\u0524\u0001\u0000\u0000\u0000\u0524\u0525\u0006\u00a1\u000b\u0000\u0525"+ - "\u0153\u0001\u0000\u0000\u0000\u0526\u0527\u0003>\u0017\u0000\u0527\u0528"+ - "\u0001\u0000\u0000\u0000\u0528\u0529\u0006\u00a2\u000b\u0000\u0529\u0155"+ - "\u0001\u0000\u0000\u0000\u052a\u052b\u0003@\u0018\u0000\u052b\u052c\u0001"+ - "\u0000\u0000\u0000\u052c\u052d\u0006\u00a3\u000b\u0000\u052d\u0157\u0001"+ - "\u0000\u0000\u0000\u052e\u052f\u0003B\u0019\u0000\u052f\u0530\u0001\u0000"+ - "\u0000\u0000\u0530\u0531\u0006\u00a4\u0010\u0000\u0531\u0532\u0006\u00a4"+ - "\f\u0000\u0532\u0159\u0001\u0000\u0000\u0000\u0533\u0534\u0003\u014e\u009f"+ - "\u0000\u0534\u0535\u0001\u0000\u0000\u0000\u0535\u0536\u0006\u00a5\u0012"+ - "\u0000\u0536\u015b\u0001\u0000\u0000\u0000\u0537\u0538\u0003h,\u0000\u0538"+ - "\u0539\u0001\u0000\u0000\u0000\u0539\u053a\u0006\u00a6\u0013\u0000\u053a"+ - 
"\u015d\u0001\u0000\u0000\u0000\u053b\u053c\u0003l.\u0000\u053c\u053d\u0001"+ - "\u0000\u0000\u0000\u053d\u053e\u0006\u00a7\u0017\u0000\u053e\u015f\u0001"+ - "\u0000\u0000\u0000\u053f\u0540\u0003\u0106{\u0000\u0540\u0541\u0001\u0000"+ - "\u0000\u0000\u0541\u0542\u0006\u00a8\u001f\u0000\u0542\u0543\u0006\u00a8"+ - " \u0000\u0543\u0161\u0001\u0000\u0000\u0000\u0544\u0545\u0003\u00d2a\u0000"+ - "\u0545\u0546\u0001\u0000\u0000\u0000\u0546\u0547\u0006\u00a9\u0015\u0000"+ - "\u0547\u0163\u0001\u0000\u0000\u0000\u0548\u0549\u0003X$\u0000\u0549\u054a"+ - "\u0001\u0000\u0000\u0000\u054a\u054b\u0006\u00aa\u0016\u0000\u054b\u0165"+ - "\u0001\u0000\u0000\u0000\u054c\u054d\u0003<\u0016\u0000\u054d\u054e\u0001"+ - "\u0000\u0000\u0000\u054e\u054f\u0006\u00ab\u000b\u0000\u054f\u0167\u0001"+ - "\u0000\u0000\u0000\u0550\u0551\u0003>\u0017\u0000\u0551\u0552\u0001\u0000"+ - "\u0000\u0000\u0552\u0553\u0006\u00ac\u000b\u0000\u0553\u0169\u0001\u0000"+ - "\u0000\u0000\u0554\u0555\u0003@\u0018\u0000\u0555\u0556\u0001\u0000\u0000"+ - "\u0000\u0556\u0557\u0006\u00ad\u000b\u0000\u0557\u016b\u0001\u0000\u0000"+ - "\u0000\u0558\u0559\u0003B\u0019\u0000\u0559\u055a\u0001\u0000\u0000\u0000"+ - "\u055a\u055b\u0006\u00ae\u0010\u0000\u055b\u055c\u0006\u00ae\f\u0000\u055c"+ - "\u055d\u0006\u00ae\f\u0000\u055d\u016d\u0001\u0000\u0000\u0000\u055e\u055f"+ - "\u0003h,\u0000\u055f\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00af"+ - "\u0013\u0000\u0561\u016f\u0001\u0000\u0000\u0000\u0562\u0563\u0003l.\u0000"+ - "\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00b0\u0017\u0000"+ - "\u0565\u0171\u0001\u0000\u0000\u0000\u0566\u0567\u0003\u00e8l\u0000\u0567"+ - "\u0568\u0001\u0000\u0000\u0000\u0568\u0569\u0006\u00b1\u0018\u0000\u0569"+ - "\u0173\u0001\u0000\u0000\u0000\u056a\u056b\u0003<\u0016\u0000\u056b\u056c"+ - "\u0001\u0000\u0000\u0000\u056c\u056d\u0006\u00b2\u000b\u0000\u056d\u0175"+ - "\u0001\u0000\u0000\u0000\u056e\u056f\u0003>\u0017\u0000\u056f\u0570\u0001"+ - 
"\u0000\u0000\u0000\u0570\u0571\u0006\u00b3\u000b\u0000\u0571\u0177\u0001"+ - "\u0000\u0000\u0000\u0572\u0573\u0003@\u0018\u0000\u0573\u0574\u0001\u0000"+ - "\u0000\u0000\u0574\u0575\u0006\u00b4\u000b\u0000\u0575\u0179\u0001\u0000"+ - "\u0000\u0000\u0576\u0577\u0003B\u0019\u0000\u0577\u0578\u0001\u0000\u0000"+ - "\u0000\u0578\u0579\u0006\u00b5\u0010\u0000\u0579\u057a\u0006\u00b5\f\u0000"+ - "\u057a\u017b\u0001\u0000\u0000\u0000\u057b\u057c\u0003\u00d2a\u0000\u057c"+ - "\u057d\u0001\u0000\u0000\u0000\u057d\u057e\u0006\u00b6\u0015\u0000\u057e"+ - "\u057f\u0006\u00b6\f\u0000\u057f\u0580\u0006\u00b6!\u0000\u0580\u017d"+ - "\u0001\u0000\u0000\u0000\u0581\u0582\u0003X$\u0000\u0582\u0583\u0001\u0000"+ - "\u0000\u0000\u0583\u0584\u0006\u00b7\u0016\u0000\u0584\u0585\u0006\u00b7"+ - "\f\u0000\u0585\u0586\u0006\u00b7!\u0000\u0586\u017f\u0001\u0000\u0000"+ - "\u0000\u0587\u0588\u0003<\u0016\u0000\u0588\u0589\u0001\u0000\u0000\u0000"+ - "\u0589\u058a\u0006\u00b8\u000b\u0000\u058a\u0181\u0001\u0000\u0000\u0000"+ - "\u058b\u058c\u0003>\u0017\u0000\u058c\u058d\u0001\u0000\u0000\u0000\u058d"+ - "\u058e\u0006\u00b9\u000b\u0000\u058e\u0183\u0001\u0000\u0000\u0000\u058f"+ - "\u0590\u0003@\u0018\u0000\u0590\u0591\u0001\u0000\u0000\u0000\u0591\u0592"+ - "\u0006\u00ba\u000b\u0000\u0592\u0185\u0001\u0000\u0000\u0000\u0593\u0594"+ - "\u0003\u014e\u009f\u0000\u0594\u0595\u0001\u0000\u0000\u0000\u0595\u0596"+ - "\u0006\u00bb\u0012\u0000\u0596\u0597\u0006\u00bb\f\u0000\u0597\u0598\u0006"+ - "\u00bb\n\u0000\u0598\u0187\u0001\u0000\u0000\u0000\u0599\u059a\u0003h"+ - ",\u0000\u059a\u059b\u0001\u0000\u0000\u0000\u059b\u059c\u0006\u00bc\u0013"+ - "\u0000\u059c\u059d\u0006\u00bc\f\u0000\u059d\u059e\u0006\u00bc\n\u0000"+ - "\u059e\u0189\u0001\u0000\u0000\u0000\u059f\u05a0\u0003<\u0016\u0000\u05a0"+ - "\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2\u0006\u00bd\u000b\u0000\u05a2"+ - "\u018b\u0001\u0000\u0000\u0000\u05a3\u05a4\u0003>\u0017\u0000\u05a4\u05a5"+ - 
"\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006\u00be\u000b\u0000\u05a6\u018d"+ - "\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003@\u0018\u0000\u05a8\u05a9\u0001"+ - "\u0000\u0000\u0000\u05a9\u05aa\u0006\u00bf\u000b\u0000\u05aa\u018f\u0001"+ - "\u0000\u0000\u0000\u05ab\u05ac\u0003\u00b0P\u0000\u05ac\u05ad\u0001\u0000"+ - "\u0000\u0000\u05ad\u05ae\u0006\u00c0\f\u0000\u05ae\u05af\u0006\u00c0\u0000"+ - "\u0000\u05af\u05b0\u0006\u00c0\u001d\u0000\u05b0\u0191\u0001\u0000\u0000"+ - "\u0000\u05b1\u05b2\u0003\u00acN\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000"+ - "\u05b3\u05b4\u0006\u00c1\f\u0000\u05b4\u05b5\u0006\u00c1\u0000\u0000\u05b5"+ - "\u05b6\u0006\u00c1\u001e\u0000\u05b6\u0193\u0001\u0000\u0000\u0000\u05b7"+ - "\u05b8\u0003^\'\u0000\u05b8\u05b9\u0001\u0000\u0000\u0000\u05b9\u05ba"+ - "\u0006\u00c2\f\u0000\u05ba\u05bb\u0006\u00c2\u0000\u0000\u05bb\u05bc\u0006"+ - "\u00c2\"\u0000\u05bc\u0195\u0001\u0000\u0000\u0000\u05bd\u05be\u0003B"+ - "\u0019\u0000\u05be\u05bf\u0001\u0000\u0000\u0000\u05bf\u05c0\u0006\u00c3"+ - "\u0010\u0000\u05c0\u05c1\u0006\u00c3\f\u0000\u05c1\u0197\u0001\u0000\u0000"+ - "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ - "\r\u000e\u000f\u0250\u025a\u025e\u0261\u026a\u026c\u0277\u028a\u028f\u0298"+ - "\u029f\u02a4\u02a6\u02b1\u02b9\u02bc\u02be\u02c3\u02c8\u02ce\u02d5\u02da"+ - "\u02e0\u02e3\u02eb\u02ef\u036e\u0373\u037a\u037c\u038c\u0391\u0396\u0398"+ - "\u039e\u03eb\u03f0\u0417\u041b\u0420\u0425\u042a\u042c\u0430\u0432\u047f"+ - "\u0483\u0488\u051e\u0520#\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ - "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\n\u0000\u0005\b\u0000"+ - "\u0005\u0005\u0000\u0005\t\u0000\u0005\f\u0000\u0005\u000e\u0000\u0000"+ - "\u0001\u0000\u0004\u0000\u0000\u0007\u0014\u0000\u0007B\u0000\u0005\u0000"+ - "\u0000\u0007\u001a\u0000\u0007C\u0000\u0007m\u0000\u0007#\u0000\u0007"+ - "!\u0000\u0007M\u0000\u0007\u001b\u0000\u0007%\u0000\u0007Q\u0000\u0005"+ - 
"\u000b\u0000\u0005\u0007\u0000\u0007[\u0000\u0007Z\u0000\u0007E\u0000"+ - "\u0007D\u0000\u0007Y\u0000\u0005\r\u0000\u0005\u000f\u0000\u0007\u001e"+ - "\u0000"; + "i\u0003i\u0404\bi\u0001j\u0001j\u0003j\u0408\bj\u0001j\u0005j\u040b\b"+ + "j\nj\fj\u040e\tj\u0001j\u0001j\u0003j\u0412\bj\u0001j\u0004j\u0415\bj"+ + "\u000bj\fj\u0416\u0003j\u0419\bj\u0001k\u0001k\u0004k\u041d\bk\u000bk"+ + "\fk\u041e\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001"+ + "n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001"+ + "u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001"+ + "x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001y\u0001"+ + "z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001}\u0004}\u046a\b}\u000b}\f}\u046b\u0001}\u0001"+ + "}\u0003}\u0470\b}\u0001}\u0004}\u0473\b}\u000b}\f}\u0474\u0001~\u0001"+ + "~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001"+ + "\u0085\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ + "\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001"+ + "\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001"+ + 
"\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001"+ + "\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + "\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001"+ + "\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0004\u009a\u04f0"+ + "\b\u009a\u000b\u009a\f\u009a\u04f1\u0001\u009b\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u009f"+ + "\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1"+ + "\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2"+ + "\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4"+ + "\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5"+ + "\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a7"+ + "\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ae"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + 
"\u0001\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1"+ + "\u0001\u00b1\u0001\u00b1\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2"+ + "\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4"+ + "\u0001\u00b4\u0001\u00b4\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb"+ + "\u0001\u00bb\u0001\u00bb\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0002\u0258\u029d\u0000\u00be\u000f\u0001\u0011\u0002\u0013"+ + "\u0003\u0015\u0004\u0017\u0005\u0019\u0006\u001b\u0007\u001d\b\u001f\t"+ + "!\n#\u000b%\f\'\r)\u000e+\u000f-\u0010/\u00111\u00123\u00135\u00147\u0015"+ + "9\u0016;\u0017=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000"+ + "M\u0000O\u0000Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001f"+ + "a c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u0089"+ + "4\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d"+ + ">\u009f?\u00a1\u0000\u00a3@\u00a5A\u00a7B\u00a9C\u00ab\u0000\u00adD\u00af"+ + "E\u00b1F\u00b3G\u00b5\u0000\u00b7\u0000\u00b9H\u00bbI\u00bdJ\u00bf\u0000"+ + "\u00c1\u0000\u00c3\u0000\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cbK\u00cd"+ + "\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5M\u00d7N\u00d9O\u00db\u0000"+ + "\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3\u0000\u00e5P\u00e7Q\u00e9R"+ + "\u00ebS\u00ed\u0000\u00ef\u0000\u00f1\u0000\u00f3\u0000\u00f5T\u00f7\u0000"+ + "\u00f9U\u00fbV\u00fdW\u00ff\u0000\u0101\u0000\u0103X\u0105Y\u0107\u0000"+ + 
"\u0109Z\u010b\u0000\u010d[\u010f\\\u0111]\u0113\u0000\u0115\u0000\u0117"+ + "\u0000\u0119\u0000\u011b\u0000\u011d\u0000\u011f\u0000\u0121^\u0123_\u0125"+ + "`\u0127\u0000\u0129\u0000\u012b\u0000\u012d\u0000\u012fa\u0131b\u0133"+ + "c\u0135\u0000\u0137d\u0139e\u013bf\u013dg\u013f\u0000\u0141h\u0143i\u0145"+ + "j\u0147k\u0149l\u014b\u0000\u014d\u0000\u014f\u0000\u0151\u0000\u0153"+ + "\u0000\u0155\u0000\u0157\u0000\u0159m\u015bn\u015do\u015f\u0000\u0161"+ + "\u0000\u0163\u0000\u0165\u0000\u0167p\u0169q\u016br\u016d\u0000\u016f"+ + "\u0000\u0171\u0000\u0173s\u0175t\u0177u\u0179\u0000\u017b\u0000\u017d"+ + "v\u017fw\u0181x\u0183\u0000\u0185\u0000\u0187\u0000\u0189\u0000\u000f"+ + "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e"+ + "#\u0002\u0000DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002"+ + "\u0000CCcc\u0002\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000"+ + "PPpp\u0002\u0000NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002"+ + "\u0000LLll\u0002\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000"+ + "GGgg\u0002\u0000KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0006\u0000\t\n\r"+ + "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ + "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ + "//::<<>?\\\\||\u05af\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001"+ + "\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001"+ + "\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001"+ + "\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001"+ + "\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000"+ + "\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000"+ + "\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000"+ + 
"+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001"+ + "\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000"+ + "\u0000\u00005\u0001\u0000\u0000\u0000\u00007\u0001\u0000\u0000\u0000\u0000"+ + "9\u0001\u0000\u0000\u0000\u0000;\u0001\u0000\u0000\u0000\u0000=\u0001"+ + "\u0000\u0000\u0000\u0001?\u0001\u0000\u0000\u0000\u0001U\u0001\u0000\u0000"+ + "\u0000\u0001W\u0001\u0000\u0000\u0000\u0001Y\u0001\u0000\u0000\u0000\u0001"+ + "[\u0001\u0000\u0000\u0000\u0001]\u0001\u0000\u0000\u0000\u0001_\u0001"+ + "\u0000\u0000\u0000\u0001a\u0001\u0000\u0000\u0000\u0001c\u0001\u0000\u0000"+ + "\u0000\u0001e\u0001\u0000\u0000\u0000\u0001g\u0001\u0000\u0000\u0000\u0001"+ + "i\u0001\u0000\u0000\u0000\u0001k\u0001\u0000\u0000\u0000\u0001m\u0001"+ + "\u0000\u0000\u0000\u0001o\u0001\u0000\u0000\u0000\u0001q\u0001\u0000\u0000"+ + "\u0000\u0001s\u0001\u0000\u0000\u0000\u0001u\u0001\u0000\u0000\u0000\u0001"+ + "w\u0001\u0000\u0000\u0000\u0001y\u0001\u0000\u0000\u0000\u0001{\u0001"+ + "\u0000\u0000\u0000\u0001}\u0001\u0000\u0000\u0000\u0001\u007f\u0001\u0000"+ + "\u0000\u0000\u0001\u0081\u0001\u0000\u0000\u0000\u0001\u0083\u0001\u0000"+ + "\u0000\u0000\u0001\u0085\u0001\u0000\u0000\u0000\u0001\u0087\u0001\u0000"+ + "\u0000\u0000\u0001\u0089\u0001\u0000\u0000\u0000\u0001\u008b\u0001\u0000"+ + "\u0000\u0000\u0001\u008d\u0001\u0000\u0000\u0000\u0001\u008f\u0001\u0000"+ + "\u0000\u0000\u0001\u0091\u0001\u0000\u0000\u0000\u0001\u0093\u0001\u0000"+ + "\u0000\u0000\u0001\u0095\u0001\u0000\u0000\u0000\u0001\u0097\u0001\u0000"+ + "\u0000\u0000\u0001\u0099\u0001\u0000\u0000\u0000\u0001\u009b\u0001\u0000"+ + "\u0000\u0000\u0001\u009d\u0001\u0000\u0000\u0000\u0001\u009f\u0001\u0000"+ + "\u0000\u0000\u0001\u00a1\u0001\u0000\u0000\u0000\u0001\u00a3\u0001\u0000"+ + "\u0000\u0000\u0001\u00a5\u0001\u0000\u0000\u0000\u0001\u00a7\u0001\u0000"+ + "\u0000\u0000\u0001\u00a9\u0001\u0000\u0000\u0000\u0001\u00ad\u0001\u0000"+ + 
"\u0000\u0000\u0001\u00af\u0001\u0000\u0000\u0000\u0001\u00b1\u0001\u0000"+ + "\u0000\u0000\u0001\u00b3\u0001\u0000\u0000\u0000\u0002\u00b5\u0001\u0000"+ + "\u0000\u0000\u0002\u00b7\u0001\u0000\u0000\u0000\u0002\u00b9\u0001\u0000"+ + "\u0000\u0000\u0002\u00bb\u0001\u0000\u0000\u0000\u0002\u00bd\u0001\u0000"+ + "\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000"+ + "\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0003\u00c5\u0001\u0000"+ + "\u0000\u0000\u0003\u00c7\u0001\u0000\u0000\u0000\u0003\u00c9\u0001\u0000"+ + "\u0000\u0000\u0003\u00cb\u0001\u0000\u0000\u0000\u0003\u00cf\u0001\u0000"+ + "\u0000\u0000\u0003\u00d1\u0001\u0000\u0000\u0000\u0003\u00d3\u0001\u0000"+ + "\u0000\u0000\u0003\u00d5\u0001\u0000\u0000\u0000\u0003\u00d7\u0001\u0000"+ + "\u0000\u0000\u0003\u00d9\u0001\u0000\u0000\u0000\u0004\u00db\u0001\u0000"+ + "\u0000\u0000\u0004\u00dd\u0001\u0000\u0000\u0000\u0004\u00df\u0001\u0000"+ + "\u0000\u0000\u0004\u00e5\u0001\u0000\u0000\u0000\u0004\u00e7\u0001\u0000"+ + "\u0000\u0000\u0004\u00e9\u0001\u0000\u0000\u0000\u0004\u00eb\u0001\u0000"+ + "\u0000\u0000\u0005\u00ed\u0001\u0000\u0000\u0000\u0005\u00ef\u0001\u0000"+ + "\u0000\u0000\u0005\u00f1\u0001\u0000\u0000\u0000\u0005\u00f3\u0001\u0000"+ + "\u0000\u0000\u0005\u00f5\u0001\u0000\u0000\u0000\u0005\u00f7\u0001\u0000"+ + "\u0000\u0000\u0005\u00f9\u0001\u0000\u0000\u0000\u0005\u00fb\u0001\u0000"+ + "\u0000\u0000\u0005\u00fd\u0001\u0000\u0000\u0000\u0006\u00ff\u0001\u0000"+ + "\u0000\u0000\u0006\u0101\u0001\u0000\u0000\u0000\u0006\u0103\u0001\u0000"+ + "\u0000\u0000\u0006\u0105\u0001\u0000\u0000\u0000\u0006\u0109\u0001\u0000"+ + "\u0000\u0000\u0006\u010b\u0001\u0000\u0000\u0000\u0006\u010d\u0001\u0000"+ + "\u0000\u0000\u0006\u010f\u0001\u0000\u0000\u0000\u0006\u0111\u0001\u0000"+ + "\u0000\u0000\u0007\u0113\u0001\u0000\u0000\u0000\u0007\u0115\u0001\u0000"+ + "\u0000\u0000\u0007\u0117\u0001\u0000\u0000\u0000\u0007\u0119\u0001\u0000"+ + 
"\u0000\u0000\u0007\u011b\u0001\u0000\u0000\u0000\u0007\u011d\u0001\u0000"+ + "\u0000\u0000\u0007\u011f\u0001\u0000\u0000\u0000\u0007\u0121\u0001\u0000"+ + "\u0000\u0000\u0007\u0123\u0001\u0000\u0000\u0000\u0007\u0125\u0001\u0000"+ + "\u0000\u0000\b\u0127\u0001\u0000\u0000\u0000\b\u0129\u0001\u0000\u0000"+ + "\u0000\b\u012b\u0001\u0000\u0000\u0000\b\u012d\u0001\u0000\u0000\u0000"+ + "\b\u012f\u0001\u0000\u0000\u0000\b\u0131\u0001\u0000\u0000\u0000\b\u0133"+ + "\u0001\u0000\u0000\u0000\t\u0135\u0001\u0000\u0000\u0000\t\u0137\u0001"+ + "\u0000\u0000\u0000\t\u0139\u0001\u0000\u0000\u0000\t\u013b\u0001\u0000"+ + "\u0000\u0000\t\u013d\u0001\u0000\u0000\u0000\n\u013f\u0001\u0000\u0000"+ + "\u0000\n\u0141\u0001\u0000\u0000\u0000\n\u0143\u0001\u0000\u0000\u0000"+ + "\n\u0145\u0001\u0000\u0000\u0000\n\u0147\u0001\u0000\u0000\u0000\n\u0149"+ + "\u0001\u0000\u0000\u0000\u000b\u014b\u0001\u0000\u0000\u0000\u000b\u014d"+ + "\u0001\u0000\u0000\u0000\u000b\u014f\u0001\u0000\u0000\u0000\u000b\u0151"+ + "\u0001\u0000\u0000\u0000\u000b\u0153\u0001\u0000\u0000\u0000\u000b\u0155"+ + "\u0001\u0000\u0000\u0000\u000b\u0157\u0001\u0000\u0000\u0000\u000b\u0159"+ + "\u0001\u0000\u0000\u0000\u000b\u015b\u0001\u0000\u0000\u0000\u000b\u015d"+ + "\u0001\u0000\u0000\u0000\f\u015f\u0001\u0000\u0000\u0000\f\u0161\u0001"+ + "\u0000\u0000\u0000\f\u0163\u0001\u0000\u0000\u0000\f\u0165\u0001\u0000"+ + "\u0000\u0000\f\u0167\u0001\u0000\u0000\u0000\f\u0169\u0001\u0000\u0000"+ + "\u0000\f\u016b\u0001\u0000\u0000\u0000\r\u016d\u0001\u0000\u0000\u0000"+ + "\r\u016f\u0001\u0000\u0000\u0000\r\u0171\u0001\u0000\u0000\u0000\r\u0173"+ + "\u0001\u0000\u0000\u0000\r\u0175\u0001\u0000\u0000\u0000\r\u0177\u0001"+ + "\u0000\u0000\u0000\u000e\u0179\u0001\u0000\u0000\u0000\u000e\u017b\u0001"+ + "\u0000\u0000\u0000\u000e\u017d\u0001\u0000\u0000\u0000\u000e\u017f\u0001"+ + "\u0000\u0000\u0000\u000e\u0181\u0001\u0000\u0000\u0000\u000e\u0183\u0001"+ + 
"\u0000\u0000\u0000\u000e\u0185\u0001\u0000\u0000\u0000\u000e\u0187\u0001"+ + "\u0000\u0000\u0000\u000e\u0189\u0001\u0000\u0000\u0000\u000f\u018b\u0001"+ + "\u0000\u0000\u0000\u0011\u0195\u0001\u0000\u0000\u0000\u0013\u019c\u0001"+ + "\u0000\u0000\u0000\u0015\u01a5\u0001\u0000\u0000\u0000\u0017\u01ac\u0001"+ + "\u0000\u0000\u0000\u0019\u01b6\u0001\u0000\u0000\u0000\u001b\u01bd\u0001"+ + "\u0000\u0000\u0000\u001d\u01c4\u0001\u0000\u0000\u0000\u001f\u01cb\u0001"+ + "\u0000\u0000\u0000!\u01d3\u0001\u0000\u0000\u0000#\u01df\u0001\u0000\u0000"+ + "\u0000%\u01e8\u0001\u0000\u0000\u0000\'\u01ee\u0001\u0000\u0000\u0000"+ + ")\u01f5\u0001\u0000\u0000\u0000+\u01fc\u0001\u0000\u0000\u0000-\u0204"+ + "\u0001\u0000\u0000\u0000/\u020c\u0001\u0000\u0000\u00001\u021b\u0001\u0000"+ + "\u0000\u00003\u0225\u0001\u0000\u0000\u00005\u022e\u0001\u0000\u0000\u0000"+ + "7\u023a\u0001\u0000\u0000\u00009\u0240\u0001\u0000\u0000\u0000;\u0251"+ + "\u0001\u0000\u0000\u0000=\u0261\u0001\u0000\u0000\u0000?\u0267\u0001\u0000"+ + "\u0000\u0000A\u026b\u0001\u0000\u0000\u0000C\u026d\u0001\u0000\u0000\u0000"+ + "E\u026f\u0001\u0000\u0000\u0000G\u0272\u0001\u0000\u0000\u0000I\u0274"+ + "\u0001\u0000\u0000\u0000K\u027d\u0001\u0000\u0000\u0000M\u027f\u0001\u0000"+ + "\u0000\u0000O\u0284\u0001\u0000\u0000\u0000Q\u0286\u0001\u0000\u0000\u0000"+ + "S\u028b\u0001\u0000\u0000\u0000U\u02aa\u0001\u0000\u0000\u0000W\u02ad"+ + "\u0001\u0000\u0000\u0000Y\u02db\u0001\u0000\u0000\u0000[\u02dd\u0001\u0000"+ + "\u0000\u0000]\u02e0\u0001\u0000\u0000\u0000_\u02e4\u0001\u0000\u0000\u0000"+ + "a\u02e8\u0001\u0000\u0000\u0000c\u02ea\u0001\u0000\u0000\u0000e\u02ed"+ + "\u0001\u0000\u0000\u0000g\u02ef\u0001\u0000\u0000\u0000i\u02f4\u0001\u0000"+ + "\u0000\u0000k\u02f6\u0001\u0000\u0000\u0000m\u02fc\u0001\u0000\u0000\u0000"+ + "o\u0302\u0001\u0000\u0000\u0000q\u0305\u0001\u0000\u0000\u0000s\u0308"+ + "\u0001\u0000\u0000\u0000u\u030d\u0001\u0000\u0000\u0000w\u0312\u0001\u0000"+ + 
"\u0000\u0000y\u0314\u0001\u0000\u0000\u0000{\u0318\u0001\u0000\u0000\u0000"+ + "}\u031d\u0001\u0000\u0000\u0000\u007f\u0323\u0001\u0000\u0000\u0000\u0081"+ + "\u0326\u0001\u0000\u0000\u0000\u0083\u0328\u0001\u0000\u0000\u0000\u0085"+ + "\u032e\u0001\u0000\u0000\u0000\u0087\u0330\u0001\u0000\u0000\u0000\u0089"+ + "\u0335\u0001\u0000\u0000\u0000\u008b\u0338\u0001\u0000\u0000\u0000\u008d"+ + "\u033b\u0001\u0000\u0000\u0000\u008f\u033e\u0001\u0000\u0000\u0000\u0091"+ + "\u0340\u0001\u0000\u0000\u0000\u0093\u0343\u0001\u0000\u0000\u0000\u0095"+ + "\u0345\u0001\u0000\u0000\u0000\u0097\u0348\u0001\u0000\u0000\u0000\u0099"+ + "\u034a\u0001\u0000\u0000\u0000\u009b\u034c\u0001\u0000\u0000\u0000\u009d"+ + "\u034e\u0001\u0000\u0000\u0000\u009f\u0350\u0001\u0000\u0000\u0000\u00a1"+ + "\u0352\u0001\u0000\u0000\u0000\u00a3\u0368\u0001\u0000\u0000\u0000\u00a5"+ + "\u036a\u0001\u0000\u0000\u0000\u00a7\u036f\u0001\u0000\u0000\u0000\u00a9"+ + "\u0384\u0001\u0000\u0000\u0000\u00ab\u0386\u0001\u0000\u0000\u0000\u00ad"+ + "\u038e\u0001\u0000\u0000\u0000\u00af\u0390\u0001\u0000\u0000\u0000\u00b1"+ + "\u0394\u0001\u0000\u0000\u0000\u00b3\u0398\u0001\u0000\u0000\u0000\u00b5"+ + "\u039c\u0001\u0000\u0000\u0000\u00b7\u03a1\u0001\u0000\u0000\u0000\u00b9"+ + "\u03a6\u0001\u0000\u0000\u0000\u00bb\u03aa\u0001\u0000\u0000\u0000\u00bd"+ + "\u03ae\u0001\u0000\u0000\u0000\u00bf\u03b2\u0001\u0000\u0000\u0000\u00c1"+ + "\u03b7\u0001\u0000\u0000\u0000\u00c3\u03bb\u0001\u0000\u0000\u0000\u00c5"+ + "\u03bf\u0001\u0000\u0000\u0000\u00c7\u03c3\u0001\u0000\u0000\u0000\u00c9"+ + "\u03c7\u0001\u0000\u0000\u0000\u00cb\u03cb\u0001\u0000\u0000\u0000\u00cd"+ + "\u03d7\u0001\u0000\u0000\u0000\u00cf\u03da\u0001\u0000\u0000\u0000\u00d1"+ + "\u03de\u0001\u0000\u0000\u0000\u00d3\u03e2\u0001\u0000\u0000\u0000\u00d5"+ + "\u03e6\u0001\u0000\u0000\u0000\u00d7\u03ea\u0001\u0000\u0000\u0000\u00d9"+ + "\u03ee\u0001\u0000\u0000\u0000\u00db\u03f2\u0001\u0000\u0000\u0000\u00dd"+ + 
"\u03f7\u0001\u0000\u0000\u0000\u00df\u03fb\u0001\u0000\u0000\u0000\u00e1"+ + "\u0403\u0001\u0000\u0000\u0000\u00e3\u0418\u0001\u0000\u0000\u0000\u00e5"+ + "\u041c\u0001\u0000\u0000\u0000\u00e7\u0420\u0001\u0000\u0000\u0000\u00e9"+ + "\u0424\u0001\u0000\u0000\u0000\u00eb\u0428\u0001\u0000\u0000\u0000\u00ed"+ + "\u042c\u0001\u0000\u0000\u0000\u00ef\u0431\u0001\u0000\u0000\u0000\u00f1"+ + "\u0435\u0001\u0000\u0000\u0000\u00f3\u0439\u0001\u0000\u0000\u0000\u00f5"+ + "\u043d\u0001\u0000\u0000\u0000\u00f7\u0440\u0001\u0000\u0000\u0000\u00f9"+ + "\u0444\u0001\u0000\u0000\u0000\u00fb\u0448\u0001\u0000\u0000\u0000\u00fd"+ + "\u044c\u0001\u0000\u0000\u0000\u00ff\u0450\u0001\u0000\u0000\u0000\u0101"+ + "\u0455\u0001\u0000\u0000\u0000\u0103\u045a\u0001\u0000\u0000\u0000\u0105"+ + "\u045f\u0001\u0000\u0000\u0000\u0107\u0466\u0001\u0000\u0000\u0000\u0109"+ + "\u046f\u0001\u0000\u0000\u0000\u010b\u0476\u0001\u0000\u0000\u0000\u010d"+ + "\u047a\u0001\u0000\u0000\u0000\u010f\u047e\u0001\u0000\u0000\u0000\u0111"+ + "\u0482\u0001\u0000\u0000\u0000\u0113\u0486\u0001\u0000\u0000\u0000\u0115"+ + "\u048c\u0001\u0000\u0000\u0000\u0117\u0490\u0001\u0000\u0000\u0000\u0119"+ + "\u0494\u0001\u0000\u0000\u0000\u011b\u0498\u0001\u0000\u0000\u0000\u011d"+ + "\u049c\u0001\u0000\u0000\u0000\u011f\u04a0\u0001\u0000\u0000\u0000\u0121"+ + "\u04a4\u0001\u0000\u0000\u0000\u0123\u04a8\u0001\u0000\u0000\u0000\u0125"+ + "\u04ac\u0001\u0000\u0000\u0000\u0127\u04b0\u0001\u0000\u0000\u0000\u0129"+ + "\u04b5\u0001\u0000\u0000\u0000\u012b\u04b9\u0001\u0000\u0000\u0000\u012d"+ + "\u04bd\u0001\u0000\u0000\u0000\u012f\u04c1\u0001\u0000\u0000\u0000\u0131"+ + "\u04c5\u0001\u0000\u0000\u0000\u0133\u04c9\u0001\u0000\u0000\u0000\u0135"+ + "\u04cd\u0001\u0000\u0000\u0000\u0137\u04d2\u0001\u0000\u0000\u0000\u0139"+ + "\u04d7\u0001\u0000\u0000\u0000\u013b\u04db\u0001\u0000\u0000\u0000\u013d"+ + "\u04df\u0001\u0000\u0000\u0000\u013f\u04e3\u0001\u0000\u0000\u0000\u0141"+ + 
"\u04e8\u0001\u0000\u0000\u0000\u0143\u04ef\u0001\u0000\u0000\u0000\u0145"+ + "\u04f3\u0001\u0000\u0000\u0000\u0147\u04f7\u0001\u0000\u0000\u0000\u0149"+ + "\u04fb\u0001\u0000\u0000\u0000\u014b\u04ff\u0001\u0000\u0000\u0000\u014d"+ + "\u0504\u0001\u0000\u0000\u0000\u014f\u0508\u0001\u0000\u0000\u0000\u0151"+ + "\u050c\u0001\u0000\u0000\u0000\u0153\u0510\u0001\u0000\u0000\u0000\u0155"+ + "\u0515\u0001\u0000\u0000\u0000\u0157\u0519\u0001\u0000\u0000\u0000\u0159"+ + "\u051d\u0001\u0000\u0000\u0000\u015b\u0521\u0001\u0000\u0000\u0000\u015d"+ + "\u0525\u0001\u0000\u0000\u0000\u015f\u0529\u0001\u0000\u0000\u0000\u0161"+ + "\u052f\u0001\u0000\u0000\u0000\u0163\u0533\u0001\u0000\u0000\u0000\u0165"+ + "\u0537\u0001\u0000\u0000\u0000\u0167\u053b\u0001\u0000\u0000\u0000\u0169"+ + "\u053f\u0001\u0000\u0000\u0000\u016b\u0543\u0001\u0000\u0000\u0000\u016d"+ + "\u0547\u0001\u0000\u0000\u0000\u016f\u054c\u0001\u0000\u0000\u0000\u0171"+ + "\u0552\u0001\u0000\u0000\u0000\u0173\u0558\u0001\u0000\u0000\u0000\u0175"+ + "\u055c\u0001\u0000\u0000\u0000\u0177\u0560\u0001\u0000\u0000\u0000\u0179"+ + "\u0564\u0001\u0000\u0000\u0000\u017b\u056a\u0001\u0000\u0000\u0000\u017d"+ + "\u0570\u0001\u0000\u0000\u0000\u017f\u0574\u0001\u0000\u0000\u0000\u0181"+ + "\u0578\u0001\u0000\u0000\u0000\u0183\u057c\u0001\u0000\u0000\u0000\u0185"+ + "\u0582\u0001\u0000\u0000\u0000\u0187\u0588\u0001\u0000\u0000\u0000\u0189"+ + "\u058e\u0001\u0000\u0000\u0000\u018b\u018c\u0007\u0000\u0000\u0000\u018c"+ + "\u018d\u0007\u0001\u0000\u0000\u018d\u018e\u0007\u0002\u0000\u0000\u018e"+ + "\u018f\u0007\u0002\u0000\u0000\u018f\u0190\u0007\u0003\u0000\u0000\u0190"+ + "\u0191\u0007\u0004\u0000\u0000\u0191\u0192\u0007\u0005\u0000\u0000\u0192"+ + "\u0193\u0001\u0000\u0000\u0000\u0193\u0194\u0006\u0000\u0000\u0000\u0194"+ + "\u0010\u0001\u0000\u0000\u0000\u0195\u0196\u0007\u0000\u0000\u0000\u0196"+ + "\u0197\u0007\u0006\u0000\u0000\u0197\u0198\u0007\u0007\u0000\u0000\u0198"+ + 
"\u0199\u0007\b\u0000\u0000\u0199\u019a\u0001\u0000\u0000\u0000\u019a\u019b"+ + "\u0006\u0001\u0001\u0000\u019b\u0012\u0001\u0000\u0000\u0000\u019c\u019d"+ + "\u0007\u0003\u0000\u0000\u019d\u019e\u0007\t\u0000\u0000\u019e\u019f\u0007"+ + "\u0006\u0000\u0000\u019f\u01a0\u0007\u0001\u0000\u0000\u01a0\u01a1\u0007"+ + "\u0004\u0000\u0000\u01a1\u01a2\u0007\n\u0000\u0000\u01a2\u01a3\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0006\u0002\u0002\u0000\u01a4\u0014\u0001\u0000"+ + "\u0000\u0000\u01a5\u01a6\u0007\u0003\u0000\u0000\u01a6\u01a7\u0007\u000b"+ + "\u0000\u0000\u01a7\u01a8\u0007\f\u0000\u0000\u01a8\u01a9\u0007\r\u0000"+ + "\u0000\u01a9\u01aa\u0001\u0000\u0000\u0000\u01aa\u01ab\u0006\u0003\u0000"+ + "\u0000\u01ab\u0016\u0001\u0000\u0000\u0000\u01ac\u01ad\u0007\u0003\u0000"+ + "\u0000\u01ad\u01ae\u0007\u000e\u0000\u0000\u01ae\u01af\u0007\b\u0000\u0000"+ + "\u01af\u01b0\u0007\r\u0000\u0000\u01b0\u01b1\u0007\f\u0000\u0000\u01b1"+ + "\u01b2\u0007\u0001\u0000\u0000\u01b2\u01b3\u0007\t\u0000\u0000\u01b3\u01b4"+ + "\u0001\u0000\u0000\u0000\u01b4\u01b5\u0006\u0004\u0003\u0000\u01b5\u0018"+ + "\u0001\u0000\u0000\u0000\u01b6\u01b7\u0007\u000f\u0000\u0000\u01b7\u01b8"+ + "\u0007\u0006\u0000\u0000\u01b8\u01b9\u0007\u0007\u0000\u0000\u01b9\u01ba"+ + "\u0007\u0010\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb\u01bc"+ + "\u0006\u0005\u0004\u0000\u01bc\u001a\u0001\u0000\u0000\u0000\u01bd\u01be"+ + "\u0007\u0011\u0000\u0000\u01be\u01bf\u0007\u0006\u0000\u0000\u01bf\u01c0"+ + "\u0007\u0007\u0000\u0000\u01c0\u01c1\u0007\u0012\u0000\u0000\u01c1\u01c2"+ + "\u0001\u0000\u0000\u0000\u01c2\u01c3\u0006\u0006\u0000\u0000\u01c3\u001c"+ + "\u0001\u0000\u0000\u0000\u01c4\u01c5\u0007\u0012\u0000\u0000\u01c5\u01c6"+ + "\u0007\u0003\u0000\u0000\u01c6\u01c7\u0007\u0003\u0000\u0000\u01c7\u01c8"+ + "\u0007\b\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006"+ + "\u0007\u0001\u0000\u01ca\u001e\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007"+ + 
"\r\u0000\u0000\u01cc\u01cd\u0007\u0001\u0000\u0000\u01cd\u01ce\u0007\u0010"+ + "\u0000\u0000\u01ce\u01cf\u0007\u0001\u0000\u0000\u01cf\u01d0\u0007\u0005"+ + "\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000\u01d1\u01d2\u0006\b\u0000"+ + "\u0000\u01d2 \u0001\u0000\u0000\u0000\u01d3\u01d4\u0007\u0010\u0000\u0000"+ + "\u01d4\u01d5\u0007\u000b\u0000\u0000\u01d5\u01d6\u0005_\u0000\u0000\u01d6"+ + "\u01d7\u0007\u0003\u0000\u0000\u01d7\u01d8\u0007\u000e\u0000\u0000\u01d8"+ + "\u01d9\u0007\b\u0000\u0000\u01d9\u01da\u0007\f\u0000\u0000\u01da\u01db"+ + "\u0007\t\u0000\u0000\u01db\u01dc\u0007\u0000\u0000\u0000\u01dc\u01dd\u0001"+ + "\u0000\u0000\u0000\u01dd\u01de\u0006\t\u0005\u0000\u01de\"\u0001\u0000"+ + "\u0000\u0000\u01df\u01e0\u0007\u0006\u0000\u0000\u01e0\u01e1\u0007\u0003"+ + "\u0000\u0000\u01e1\u01e2\u0007\t\u0000\u0000\u01e2\u01e3\u0007\f\u0000"+ + "\u0000\u01e3\u01e4\u0007\u0010\u0000\u0000\u01e4\u01e5\u0007\u0003\u0000"+ + "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0006\n\u0006\u0000"+ + "\u01e7$\u0001\u0000\u0000\u0000\u01e8\u01e9\u0007\u0006\u0000\u0000\u01e9"+ + "\u01ea\u0007\u0007\u0000\u0000\u01ea\u01eb\u0007\u0013\u0000\u0000\u01eb"+ + "\u01ec\u0001\u0000\u0000\u0000\u01ec\u01ed\u0006\u000b\u0000\u0000\u01ed"+ + "&\u0001\u0000\u0000\u0000\u01ee\u01ef\u0007\u0002\u0000\u0000\u01ef\u01f0"+ + "\u0007\n\u0000\u0000\u01f0\u01f1\u0007\u0007\u0000\u0000\u01f1\u01f2\u0007"+ + "\u0013\u0000\u0000\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u01f4\u0006"+ + "\f\u0007\u0000\u01f4(\u0001\u0000\u0000\u0000\u01f5\u01f6\u0007\u0002"+ + "\u0000\u0000\u01f6\u01f7\u0007\u0007\u0000\u0000\u01f7\u01f8\u0007\u0006"+ + "\u0000\u0000\u01f8\u01f9\u0007\u0005\u0000\u0000\u01f9\u01fa\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0006\r\u0000\u0000\u01fb*\u0001\u0000\u0000"+ + "\u0000\u01fc\u01fd\u0007\u0002\u0000\u0000\u01fd\u01fe\u0007\u0005\u0000"+ + "\u0000\u01fe\u01ff\u0007\f\u0000\u0000\u01ff\u0200\u0007\u0005\u0000\u0000"+ + 
"\u0200\u0201\u0007\u0002\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0006\u000e\u0000\u0000\u0203,\u0001\u0000\u0000\u0000\u0204"+ + "\u0205\u0007\u0013\u0000\u0000\u0205\u0206\u0007\n\u0000\u0000\u0206\u0207"+ + "\u0007\u0003\u0000\u0000\u0207\u0208\u0007\u0006\u0000\u0000\u0208\u0209"+ + "\u0007\u0003\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u020b"+ + "\u0006\u000f\u0000\u0000\u020b.\u0001\u0000\u0000\u0000\u020c\u020d\u0004"+ + "\u0010\u0000\u0000\u020d\u020e\u0007\u0001\u0000\u0000\u020e\u020f\u0007"+ + "\t\u0000\u0000\u020f\u0210\u0007\r\u0000\u0000\u0210\u0211\u0007\u0001"+ + "\u0000\u0000\u0211\u0212\u0007\t\u0000\u0000\u0212\u0213\u0007\u0003\u0000"+ + "\u0000\u0213\u0214\u0007\u0002\u0000\u0000\u0214\u0215\u0007\u0005\u0000"+ + "\u0000\u0215\u0216\u0007\f\u0000\u0000\u0216\u0217\u0007\u0005\u0000\u0000"+ + "\u0217\u0218\u0007\u0002\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000"+ + "\u0219\u021a\u0006\u0010\u0000\u0000\u021a0\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0004\u0011\u0001\u0000\u021c\u021d\u0007\r\u0000\u0000\u021d\u021e"+ + "\u0007\u0007\u0000\u0000\u021e\u021f\u0007\u0007\u0000\u0000\u021f\u0220"+ + "\u0007\u0012\u0000\u0000\u0220\u0221\u0007\u0014\u0000\u0000\u0221\u0222"+ + "\u0007\b\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000\u0223\u0224\u0006"+ + "\u0011\b\u0000\u02242\u0001\u0000\u0000\u0000\u0225\u0226\u0004\u0012"+ + "\u0002\u0000\u0226\u0227\u0007\u0010\u0000\u0000\u0227\u0228\u0007\f\u0000"+ + "\u0000\u0228\u0229\u0007\u0005\u0000\u0000\u0229\u022a\u0007\u0004\u0000"+ + "\u0000\u022a\u022b\u0007\n\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000"+ + "\u022c\u022d\u0006\u0012\u0000\u0000\u022d4\u0001\u0000\u0000\u0000\u022e"+ + "\u022f\u0004\u0013\u0003\u0000\u022f\u0230\u0007\u0010\u0000\u0000\u0230"+ + "\u0231\u0007\u0003\u0000\u0000\u0231\u0232\u0007\u0005\u0000\u0000\u0232"+ + "\u0233\u0007\u0006\u0000\u0000\u0233\u0234\u0007\u0001\u0000\u0000\u0234"+ + 
"\u0235\u0007\u0004\u0000\u0000\u0235\u0236\u0007\u0002\u0000\u0000\u0236"+ + "\u0237\u0001\u0000\u0000\u0000\u0237\u0238\u0006\u0013\t\u0000\u02386"+ + "\u0001\u0000\u0000\u0000\u0239\u023b\b\u0015\u0000\u0000\u023a\u0239\u0001"+ + "\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023c\u023a\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0001\u0000\u0000\u0000\u023d\u023e\u0001"+ + "\u0000\u0000\u0000\u023e\u023f\u0006\u0014\u0000\u0000\u023f8\u0001\u0000"+ + "\u0000\u0000\u0240\u0241\u0005/\u0000\u0000\u0241\u0242\u0005/\u0000\u0000"+ + "\u0242\u0246\u0001\u0000\u0000\u0000\u0243\u0245\b\u0016\u0000\u0000\u0244"+ + "\u0243\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000\u0000\u0000\u0246"+ + "\u0244\u0001\u0000\u0000\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247"+ + "\u024a\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0249"+ + "\u024b\u0005\r\u0000\u0000\u024a\u0249\u0001\u0000\u0000\u0000\u024a\u024b"+ + "\u0001\u0000\u0000\u0000\u024b\u024d\u0001\u0000\u0000\u0000\u024c\u024e"+ + "\u0005\n\u0000\u0000\u024d\u024c\u0001\u0000\u0000\u0000\u024d\u024e\u0001"+ + "\u0000\u0000\u0000\u024e\u024f\u0001\u0000\u0000\u0000\u024f\u0250\u0006"+ + "\u0015\n\u0000\u0250:\u0001\u0000\u0000\u0000\u0251\u0252\u0005/\u0000"+ + "\u0000\u0252\u0253\u0005*\u0000\u0000\u0253\u0258\u0001\u0000\u0000\u0000"+ + "\u0254\u0257\u0003;\u0016\u0000\u0255\u0257\t\u0000\u0000\u0000\u0256"+ + "\u0254\u0001\u0000\u0000\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0257"+ + "\u025a\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000\u0000\u0258"+ + "\u0256\u0001\u0000\u0000\u0000\u0259\u025b\u0001\u0000\u0000\u0000\u025a"+ + "\u0258\u0001\u0000\u0000\u0000\u025b\u025c\u0005*\u0000\u0000\u025c\u025d"+ + "\u0005/\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000\u025e\u025f\u0006"+ + "\u0016\n\u0000\u025f<\u0001\u0000\u0000\u0000\u0260\u0262\u0007\u0017"+ + "\u0000\u0000\u0261\u0260\u0001\u0000\u0000\u0000\u0262\u0263\u0001\u0000"+ + 
"\u0000\u0000\u0263\u0261\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000"+ + "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0266\u0006\u0017"+ + "\n\u0000\u0266>\u0001\u0000\u0000\u0000\u0267\u0268\u0005|\u0000\u0000"+ + "\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026a\u0006\u0018\u000b\u0000"+ + "\u026a@\u0001\u0000\u0000\u0000\u026b\u026c\u0007\u0018\u0000\u0000\u026c"+ + "B\u0001\u0000\u0000\u0000\u026d\u026e\u0007\u0019\u0000\u0000\u026eD\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0005\\\u0000\u0000\u0270\u0271\u0007\u001a"+ + "\u0000\u0000\u0271F\u0001\u0000\u0000\u0000\u0272\u0273\b\u001b\u0000"+ + "\u0000\u0273H\u0001\u0000\u0000\u0000\u0274\u0276\u0007\u0003\u0000\u0000"+ + "\u0275\u0277\u0007\u001c\u0000\u0000\u0276\u0275\u0001\u0000\u0000\u0000"+ + "\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0279\u0001\u0000\u0000\u0000"+ + "\u0278\u027a\u0003A\u0019\u0000\u0279\u0278\u0001\u0000\u0000\u0000\u027a"+ + "\u027b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000\u0000\u027b"+ + "\u027c\u0001\u0000\u0000\u0000\u027cJ\u0001\u0000\u0000\u0000\u027d\u027e"+ + "\u0005@\u0000\u0000\u027eL\u0001\u0000\u0000\u0000\u027f\u0280\u0005`"+ + "\u0000\u0000\u0280N\u0001\u0000\u0000\u0000\u0281\u0285\b\u001d\u0000"+ + "\u0000\u0282\u0283\u0005`\u0000\u0000\u0283\u0285\u0005`\u0000\u0000\u0284"+ + "\u0281\u0001\u0000\u0000\u0000\u0284\u0282\u0001\u0000\u0000\u0000\u0285"+ + "P\u0001\u0000\u0000\u0000\u0286\u0287\u0005_\u0000\u0000\u0287R\u0001"+ + "\u0000\u0000\u0000\u0288\u028c\u0003C\u001a\u0000\u0289\u028c\u0003A\u0019"+ + "\u0000\u028a\u028c\u0003Q!\u0000\u028b\u0288\u0001\u0000\u0000\u0000\u028b"+ + "\u0289\u0001\u0000\u0000\u0000\u028b\u028a\u0001\u0000\u0000\u0000\u028c"+ + "T\u0001\u0000\u0000\u0000\u028d\u0292\u0005\"\u0000\u0000\u028e\u0291"+ + "\u0003E\u001b\u0000\u028f\u0291\u0003G\u001c\u0000\u0290\u028e\u0001\u0000"+ + "\u0000\u0000\u0290\u028f\u0001\u0000\u0000\u0000\u0291\u0294\u0001\u0000"+ + 
"\u0000\u0000\u0292\u0290\u0001\u0000\u0000\u0000\u0292\u0293\u0001\u0000"+ + "\u0000\u0000\u0293\u0295\u0001\u0000\u0000\u0000\u0294\u0292\u0001\u0000"+ + "\u0000\u0000\u0295\u02ab\u0005\"\u0000\u0000\u0296\u0297\u0005\"\u0000"+ + "\u0000\u0297\u0298\u0005\"\u0000\u0000\u0298\u0299\u0005\"\u0000\u0000"+ + "\u0299\u029d\u0001\u0000\u0000\u0000\u029a\u029c\b\u0016\u0000\u0000\u029b"+ + "\u029a\u0001\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d"+ + "\u029e\u0001\u0000\u0000\u0000\u029d\u029b\u0001\u0000\u0000\u0000\u029e"+ + "\u02a0\u0001\u0000\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0"+ + "\u02a1\u0005\"\u0000\u0000\u02a1\u02a2\u0005\"\u0000\u0000\u02a2\u02a3"+ + "\u0005\"\u0000\u0000\u02a3\u02a5\u0001\u0000\u0000\u0000\u02a4\u02a6\u0005"+ + "\"\u0000\u0000\u02a5\u02a4\u0001\u0000\u0000\u0000\u02a5\u02a6\u0001\u0000"+ + "\u0000\u0000\u02a6\u02a8\u0001\u0000\u0000\u0000\u02a7\u02a9\u0005\"\u0000"+ + "\u0000\u02a8\u02a7\u0001\u0000\u0000\u0000\u02a8\u02a9\u0001\u0000\u0000"+ + "\u0000\u02a9\u02ab\u0001\u0000\u0000\u0000\u02aa\u028d\u0001\u0000\u0000"+ + "\u0000\u02aa\u0296\u0001\u0000\u0000\u0000\u02abV\u0001\u0000\u0000\u0000"+ + "\u02ac\u02ae\u0003A\u0019\u0000\u02ad\u02ac\u0001\u0000\u0000\u0000\u02ae"+ + "\u02af\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02af"+ + "\u02b0\u0001\u0000\u0000\u0000\u02b0X\u0001\u0000\u0000\u0000\u02b1\u02b3"+ + "\u0003A\u0019\u0000\u02b2\u02b1\u0001\u0000\u0000\u0000\u02b3\u02b4\u0001"+ + "\u0000\u0000\u0000\u02b4\u02b2\u0001\u0000\u0000\u0000\u02b4\u02b5\u0001"+ + "\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6\u02ba\u0003"+ + "i-\u0000\u02b7\u02b9\u0003A\u0019\u0000\u02b8\u02b7\u0001\u0000\u0000"+ + "\u0000\u02b9\u02bc\u0001\u0000\u0000\u0000\u02ba\u02b8\u0001\u0000\u0000"+ + "\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb\u02dc\u0001\u0000\u0000"+ + "\u0000\u02bc\u02ba\u0001\u0000\u0000\u0000\u02bd\u02bf\u0003i-\u0000\u02be"+ + 
"\u02c0\u0003A\u0019\u0000\u02bf\u02be\u0001\u0000\u0000\u0000\u02c0\u02c1"+ + "\u0001\u0000\u0000\u0000\u02c1\u02bf\u0001\u0000\u0000\u0000\u02c1\u02c2"+ + "\u0001\u0000\u0000\u0000\u02c2\u02dc\u0001\u0000\u0000\u0000\u02c3\u02c5"+ + "\u0003A\u0019\u0000\u02c4\u02c3\u0001\u0000\u0000\u0000\u02c5\u02c6\u0001"+ + "\u0000\u0000\u0000\u02c6\u02c4\u0001\u0000\u0000\u0000\u02c6\u02c7\u0001"+ + "\u0000\u0000\u0000\u02c7\u02cf\u0001\u0000\u0000\u0000\u02c8\u02cc\u0003"+ + "i-\u0000\u02c9\u02cb\u0003A\u0019\u0000\u02ca\u02c9\u0001\u0000\u0000"+ + "\u0000\u02cb\u02ce\u0001\u0000\u0000\u0000\u02cc\u02ca\u0001\u0000\u0000"+ + "\u0000\u02cc\u02cd\u0001\u0000\u0000\u0000\u02cd\u02d0\u0001\u0000\u0000"+ + "\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02cf\u02c8\u0001\u0000\u0000"+ + "\u0000\u02cf\u02d0\u0001\u0000\u0000\u0000\u02d0\u02d1\u0001\u0000\u0000"+ + "\u0000\u02d1\u02d2\u0003I\u001d\u0000\u02d2\u02dc\u0001\u0000\u0000\u0000"+ + "\u02d3\u02d5\u0003i-\u0000\u02d4\u02d6\u0003A\u0019\u0000\u02d5\u02d4"+ + "\u0001\u0000\u0000\u0000\u02d6\u02d7\u0001\u0000\u0000\u0000\u02d7\u02d5"+ + "\u0001\u0000\u0000\u0000\u02d7\u02d8\u0001\u0000\u0000\u0000\u02d8\u02d9"+ + "\u0001\u0000\u0000\u0000\u02d9\u02da\u0003I\u001d\u0000\u02da\u02dc\u0001"+ + "\u0000\u0000\u0000\u02db\u02b2\u0001\u0000\u0000\u0000\u02db\u02bd\u0001"+ + "\u0000\u0000\u0000\u02db\u02c4\u0001\u0000\u0000\u0000\u02db\u02d3\u0001"+ + "\u0000\u0000\u0000\u02dcZ\u0001\u0000\u0000\u0000\u02dd\u02de\u0007\u001e"+ + "\u0000\u0000\u02de\u02df\u0007\u001f\u0000\u0000\u02df\\\u0001\u0000\u0000"+ + "\u0000\u02e0\u02e1\u0007\f\u0000\u0000\u02e1\u02e2\u0007\t\u0000\u0000"+ + "\u02e2\u02e3\u0007\u0000\u0000\u0000\u02e3^\u0001\u0000\u0000\u0000\u02e4"+ + "\u02e5\u0007\f\u0000\u0000\u02e5\u02e6\u0007\u0002\u0000\u0000\u02e6\u02e7"+ + "\u0007\u0004\u0000\u0000\u02e7`\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005"+ + "=\u0000\u0000\u02e9b\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005:\u0000"+ + 
"\u0000\u02eb\u02ec\u0005:\u0000\u0000\u02ecd\u0001\u0000\u0000\u0000\u02ed"+ + "\u02ee\u0005,\u0000\u0000\u02eef\u0001\u0000\u0000\u0000\u02ef\u02f0\u0007"+ + "\u0000\u0000\u0000\u02f0\u02f1\u0007\u0003\u0000\u0000\u02f1\u02f2\u0007"+ + "\u0002\u0000\u0000\u02f2\u02f3\u0007\u0004\u0000\u0000\u02f3h\u0001\u0000"+ + "\u0000\u0000\u02f4\u02f5\u0005.\u0000\u0000\u02f5j\u0001\u0000\u0000\u0000"+ + "\u02f6\u02f7\u0007\u000f\u0000\u0000\u02f7\u02f8\u0007\f\u0000\u0000\u02f8"+ + "\u02f9\u0007\r\u0000\u0000\u02f9\u02fa\u0007\u0002\u0000\u0000\u02fa\u02fb"+ + "\u0007\u0003\u0000\u0000\u02fbl\u0001\u0000\u0000\u0000\u02fc\u02fd\u0007"+ + "\u000f\u0000\u0000\u02fd\u02fe\u0007\u0001\u0000\u0000\u02fe\u02ff\u0007"+ + "\u0006\u0000\u0000\u02ff\u0300\u0007\u0002\u0000\u0000\u0300\u0301\u0007"+ + "\u0005\u0000\u0000\u0301n\u0001\u0000\u0000\u0000\u0302\u0303\u0007\u0001"+ + "\u0000\u0000\u0303\u0304\u0007\t\u0000\u0000\u0304p\u0001\u0000\u0000"+ + "\u0000\u0305\u0306\u0007\u0001\u0000\u0000\u0306\u0307\u0007\u0002\u0000"+ + "\u0000\u0307r\u0001\u0000\u0000\u0000\u0308\u0309\u0007\r\u0000\u0000"+ + "\u0309\u030a\u0007\f\u0000\u0000\u030a\u030b\u0007\u0002\u0000\u0000\u030b"+ + "\u030c\u0007\u0005\u0000\u0000\u030ct\u0001\u0000\u0000\u0000\u030d\u030e"+ + "\u0007\r\u0000\u0000\u030e\u030f\u0007\u0001\u0000\u0000\u030f\u0310\u0007"+ + "\u0012\u0000\u0000\u0310\u0311\u0007\u0003\u0000\u0000\u0311v\u0001\u0000"+ + "\u0000\u0000\u0312\u0313\u0005(\u0000\u0000\u0313x\u0001\u0000\u0000\u0000"+ + "\u0314\u0315\u0007\t\u0000\u0000\u0315\u0316\u0007\u0007\u0000\u0000\u0316"+ + "\u0317\u0007\u0005\u0000\u0000\u0317z\u0001\u0000\u0000\u0000\u0318\u0319"+ + "\u0007\t\u0000\u0000\u0319\u031a\u0007\u0014\u0000\u0000\u031a\u031b\u0007"+ + "\r\u0000\u0000\u031b\u031c\u0007\r\u0000\u0000\u031c|\u0001\u0000\u0000"+ + "\u0000\u031d\u031e\u0007\t\u0000\u0000\u031e\u031f\u0007\u0014\u0000\u0000"+ + "\u031f\u0320\u0007\r\u0000\u0000\u0320\u0321\u0007\r\u0000\u0000\u0321"+ + 
"\u0322\u0007\u0002\u0000\u0000\u0322~\u0001\u0000\u0000\u0000\u0323\u0324"+ + "\u0007\u0007\u0000\u0000\u0324\u0325\u0007\u0006\u0000\u0000\u0325\u0080"+ + "\u0001\u0000\u0000\u0000\u0326\u0327\u0005?\u0000\u0000\u0327\u0082\u0001"+ + "\u0000\u0000\u0000\u0328\u0329\u0007\u0006\u0000\u0000\u0329\u032a\u0007"+ + "\r\u0000\u0000\u032a\u032b\u0007\u0001\u0000\u0000\u032b\u032c\u0007\u0012"+ + "\u0000\u0000\u032c\u032d\u0007\u0003\u0000\u0000\u032d\u0084\u0001\u0000"+ + "\u0000\u0000\u032e\u032f\u0005)\u0000\u0000\u032f\u0086\u0001\u0000\u0000"+ + "\u0000\u0330\u0331\u0007\u0005\u0000\u0000\u0331\u0332\u0007\u0006\u0000"+ + "\u0000\u0332\u0333\u0007\u0014\u0000\u0000\u0333\u0334\u0007\u0003\u0000"+ + "\u0000\u0334\u0088\u0001\u0000\u0000\u0000\u0335\u0336\u0005=\u0000\u0000"+ + "\u0336\u0337\u0005=\u0000\u0000\u0337\u008a\u0001\u0000\u0000\u0000\u0338"+ + "\u0339\u0005=\u0000\u0000\u0339\u033a\u0005~\u0000\u0000\u033a\u008c\u0001"+ + "\u0000\u0000\u0000\u033b\u033c\u0005!\u0000\u0000\u033c\u033d\u0005=\u0000"+ + "\u0000\u033d\u008e\u0001\u0000\u0000\u0000\u033e\u033f\u0005<\u0000\u0000"+ + "\u033f\u0090\u0001\u0000\u0000\u0000\u0340\u0341\u0005<\u0000\u0000\u0341"+ + "\u0342\u0005=\u0000\u0000\u0342\u0092\u0001\u0000\u0000\u0000\u0343\u0344"+ + "\u0005>\u0000\u0000\u0344\u0094\u0001\u0000\u0000\u0000\u0345\u0346\u0005"+ + ">\u0000\u0000\u0346\u0347\u0005=\u0000\u0000\u0347\u0096\u0001\u0000\u0000"+ + "\u0000\u0348\u0349\u0005+\u0000\u0000\u0349\u0098\u0001\u0000\u0000\u0000"+ + "\u034a\u034b\u0005-\u0000\u0000\u034b\u009a\u0001\u0000\u0000\u0000\u034c"+ + "\u034d\u0005*\u0000\u0000\u034d\u009c\u0001\u0000\u0000\u0000\u034e\u034f"+ + "\u0005/\u0000\u0000\u034f\u009e\u0001\u0000\u0000\u0000\u0350\u0351\u0005"+ + "%\u0000\u0000\u0351\u00a0\u0001\u0000\u0000\u0000\u0352\u0353\u0004I\u0004"+ + "\u0000\u0353\u0354\u00033\u0012\u0000\u0354\u0355\u0001\u0000\u0000\u0000"+ + "\u0355\u0356\u0006I\f\u0000\u0356\u00a2\u0001\u0000\u0000\u0000\u0357"+ + 
"\u035a\u0003\u00819\u0000\u0358\u035b\u0003C\u001a\u0000\u0359\u035b\u0003"+ + "Q!\u0000\u035a\u0358\u0001\u0000\u0000\u0000\u035a\u0359\u0001\u0000\u0000"+ + "\u0000\u035b\u035f\u0001\u0000\u0000\u0000\u035c\u035e\u0003S\"\u0000"+ + "\u035d\u035c\u0001\u0000\u0000\u0000\u035e\u0361\u0001\u0000\u0000\u0000"+ + "\u035f\u035d\u0001\u0000\u0000\u0000\u035f\u0360\u0001\u0000\u0000\u0000"+ + "\u0360\u0369\u0001\u0000\u0000\u0000\u0361\u035f\u0001\u0000\u0000\u0000"+ + "\u0362\u0364\u0003\u00819\u0000\u0363\u0365\u0003A\u0019\u0000\u0364\u0363"+ + "\u0001\u0000\u0000\u0000\u0365\u0366\u0001\u0000\u0000\u0000\u0366\u0364"+ + "\u0001\u0000\u0000\u0000\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0369"+ + "\u0001\u0000\u0000\u0000\u0368\u0357\u0001\u0000\u0000\u0000\u0368\u0362"+ + "\u0001\u0000\u0000\u0000\u0369\u00a4\u0001\u0000\u0000\u0000\u036a\u036b"+ + "\u0005[\u0000\u0000\u036b\u036c\u0001\u0000\u0000\u0000\u036c\u036d\u0006"+ + "K\u0000\u0000\u036d\u036e\u0006K\u0000\u0000\u036e\u00a6\u0001\u0000\u0000"+ + "\u0000\u036f\u0370\u0005]\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000"+ + "\u0371\u0372\u0006L\u000b\u0000\u0372\u0373\u0006L\u000b\u0000\u0373\u00a8"+ + "\u0001\u0000\u0000\u0000\u0374\u0378\u0003C\u001a\u0000\u0375\u0377\u0003"+ + "S\"\u0000\u0376\u0375\u0001\u0000\u0000\u0000\u0377\u037a\u0001\u0000"+ + "\u0000\u0000\u0378\u0376\u0001\u0000\u0000\u0000\u0378\u0379\u0001\u0000"+ + "\u0000\u0000\u0379\u0385\u0001\u0000\u0000\u0000\u037a\u0378\u0001\u0000"+ + "\u0000\u0000\u037b\u037e\u0003Q!\u0000\u037c\u037e\u0003K\u001e\u0000"+ + "\u037d\u037b\u0001\u0000\u0000\u0000\u037d\u037c\u0001\u0000\u0000\u0000"+ + "\u037e\u0380\u0001\u0000\u0000\u0000\u037f\u0381\u0003S\"\u0000\u0380"+ + "\u037f\u0001\u0000\u0000\u0000\u0381\u0382\u0001\u0000\u0000\u0000\u0382"+ + "\u0380\u0001\u0000\u0000\u0000\u0382\u0383\u0001\u0000\u0000\u0000\u0383"+ + "\u0385\u0001\u0000\u0000\u0000\u0384\u0374\u0001\u0000\u0000\u0000\u0384"+ + 
"\u037d\u0001\u0000\u0000\u0000\u0385\u00aa\u0001\u0000\u0000\u0000\u0386"+ + "\u0388\u0003M\u001f\u0000\u0387\u0389\u0003O \u0000\u0388\u0387\u0001"+ + "\u0000\u0000\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u0388\u0001"+ + "\u0000\u0000\u0000\u038a\u038b\u0001\u0000\u0000\u0000\u038b\u038c\u0001"+ + "\u0000\u0000\u0000\u038c\u038d\u0003M\u001f\u0000\u038d\u00ac\u0001\u0000"+ + "\u0000\u0000\u038e\u038f\u0003\u00abN\u0000\u038f\u00ae\u0001\u0000\u0000"+ + "\u0000\u0390\u0391\u00039\u0015\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ + "\u0392\u0393\u0006P\n\u0000\u0393\u00b0\u0001\u0000\u0000\u0000\u0394"+ + "\u0395\u0003;\u0016\u0000\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0397"+ + "\u0006Q\n\u0000\u0397\u00b2\u0001\u0000\u0000\u0000\u0398\u0399\u0003"+ + "=\u0017\u0000\u0399\u039a\u0001\u0000\u0000\u0000\u039a\u039b\u0006R\n"+ + "\u0000\u039b\u00b4\u0001\u0000\u0000\u0000\u039c\u039d\u0003\u00a5K\u0000"+ + "\u039d\u039e\u0001\u0000\u0000\u0000\u039e\u039f\u0006S\r\u0000\u039f"+ + "\u03a0\u0006S\u000e\u0000\u03a0\u00b6\u0001\u0000\u0000\u0000\u03a1\u03a2"+ + "\u0003?\u0018\u0000\u03a2\u03a3\u0001\u0000\u0000\u0000\u03a3\u03a4\u0006"+ + "T\u000f\u0000\u03a4\u03a5\u0006T\u000b\u0000\u03a5\u00b8\u0001\u0000\u0000"+ + "\u0000\u03a6\u03a7\u0003=\u0017\u0000\u03a7\u03a8\u0001\u0000\u0000\u0000"+ + "\u03a8\u03a9\u0006U\n\u0000\u03a9\u00ba\u0001\u0000\u0000\u0000\u03aa"+ + "\u03ab\u00039\u0015\u0000\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ad"+ + "\u0006V\n\u0000\u03ad\u00bc\u0001\u0000\u0000\u0000\u03ae\u03af\u0003"+ + ";\u0016\u0000\u03af\u03b0\u0001\u0000\u0000\u0000\u03b0\u03b1\u0006W\n"+ + "\u0000\u03b1\u00be\u0001\u0000\u0000\u0000\u03b2\u03b3\u0003?\u0018\u0000"+ + "\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u03b5\u0006X\u000f\u0000\u03b5"+ + "\u03b6\u0006X\u000b\u0000\u03b6\u00c0\u0001\u0000\u0000\u0000\u03b7\u03b8"+ + "\u0003\u00a5K\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba\u0006"+ + 
"Y\r\u0000\u03ba\u00c2\u0001\u0000\u0000\u0000\u03bb\u03bc\u0003\u00a7"+ + "L\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006Z\u0010\u0000"+ + "\u03be\u00c4\u0001\u0000\u0000\u0000\u03bf\u03c0\u0003\u0141\u0099\u0000"+ + "\u03c0\u03c1\u0001\u0000\u0000\u0000\u03c1\u03c2\u0006[\u0011\u0000\u03c2"+ + "\u00c6\u0001\u0000\u0000\u0000\u03c3\u03c4\u0003e+\u0000\u03c4\u03c5\u0001"+ + "\u0000\u0000\u0000\u03c5\u03c6\u0006\\\u0012\u0000\u03c6\u00c8\u0001\u0000"+ + "\u0000\u0000\u03c7\u03c8\u0003a)\u0000\u03c8\u03c9\u0001\u0000\u0000\u0000"+ + "\u03c9\u03ca\u0006]\u0013\u0000\u03ca\u00ca\u0001\u0000\u0000\u0000\u03cb"+ + "\u03cc\u0007\u0010\u0000\u0000\u03cc\u03cd\u0007\u0003\u0000\u0000\u03cd"+ + "\u03ce\u0007\u0005\u0000\u0000\u03ce\u03cf\u0007\f\u0000\u0000\u03cf\u03d0"+ + "\u0007\u0000\u0000\u0000\u03d0\u03d1\u0007\f\u0000\u0000\u03d1\u03d2\u0007"+ + "\u0005\u0000\u0000\u03d2\u03d3\u0007\f\u0000\u0000\u03d3\u00cc\u0001\u0000"+ + "\u0000\u0000\u03d4\u03d8\b \u0000\u0000\u03d5\u03d6\u0005/\u0000\u0000"+ + "\u03d6\u03d8\b!\u0000\u0000\u03d7\u03d4\u0001\u0000\u0000\u0000\u03d7"+ + "\u03d5\u0001\u0000\u0000\u0000\u03d8\u00ce\u0001\u0000\u0000\u0000\u03d9"+ + "\u03db\u0003\u00cd_\u0000\u03da\u03d9\u0001\u0000\u0000\u0000\u03db\u03dc"+ + "\u0001\u0000\u0000\u0000\u03dc\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd"+ + "\u0001\u0000\u0000\u0000\u03dd\u00d0\u0001\u0000\u0000\u0000\u03de\u03df"+ + "\u0003\u00cf`\u0000\u03df\u03e0\u0001\u0000\u0000\u0000\u03e0\u03e1\u0006"+ + "a\u0014\u0000\u03e1\u00d2\u0001\u0000\u0000\u0000\u03e2\u03e3\u0003U#"+ + "\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006b\u0015\u0000"+ + "\u03e5\u00d4\u0001\u0000\u0000\u0000\u03e6\u03e7\u00039\u0015\u0000\u03e7"+ + "\u03e8\u0001\u0000\u0000\u0000\u03e8\u03e9\u0006c\n\u0000\u03e9\u00d6"+ + "\u0001\u0000\u0000\u0000\u03ea\u03eb\u0003;\u0016\u0000\u03eb\u03ec\u0001"+ + "\u0000\u0000\u0000\u03ec\u03ed\u0006d\n\u0000\u03ed\u00d8\u0001\u0000"+ + 
"\u0000\u0000\u03ee\u03ef\u0003=\u0017\u0000\u03ef\u03f0\u0001\u0000\u0000"+ + "\u0000\u03f0\u03f1\u0006e\n\u0000\u03f1\u00da\u0001\u0000\u0000\u0000"+ + "\u03f2\u03f3\u0003?\u0018\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4"+ + "\u03f5\u0006f\u000f\u0000\u03f5\u03f6\u0006f\u000b\u0000\u03f6\u00dc\u0001"+ + "\u0000\u0000\u0000\u03f7\u03f8\u0003i-\u0000\u03f8\u03f9\u0001\u0000\u0000"+ + "\u0000\u03f9\u03fa\u0006g\u0016\u0000\u03fa\u00de\u0001\u0000\u0000\u0000"+ + "\u03fb\u03fc\u0003e+\u0000\u03fc\u03fd\u0001\u0000\u0000\u0000\u03fd\u03fe"+ + "\u0006h\u0012\u0000\u03fe\u00e0\u0001\u0000\u0000\u0000\u03ff\u0404\u0003"+ + "C\u001a\u0000\u0400\u0404\u0003A\u0019\u0000\u0401\u0404\u0003Q!\u0000"+ + "\u0402\u0404\u0003\u009bF\u0000\u0403\u03ff\u0001\u0000\u0000\u0000\u0403"+ + "\u0400\u0001\u0000\u0000\u0000\u0403\u0401\u0001\u0000\u0000\u0000\u0403"+ + "\u0402\u0001\u0000\u0000\u0000\u0404\u00e2\u0001\u0000\u0000\u0000\u0405"+ + "\u0408\u0003C\u001a\u0000\u0406\u0408\u0003\u009bF\u0000\u0407\u0405\u0001"+ + "\u0000\u0000\u0000\u0407\u0406\u0001\u0000\u0000\u0000\u0408\u040c\u0001"+ + "\u0000\u0000\u0000\u0409\u040b\u0003\u00e1i\u0000\u040a\u0409\u0001\u0000"+ + "\u0000\u0000\u040b\u040e\u0001\u0000\u0000\u0000\u040c\u040a\u0001\u0000"+ + "\u0000\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u0419\u0001\u0000"+ + "\u0000\u0000\u040e\u040c\u0001\u0000\u0000\u0000\u040f\u0412\u0003Q!\u0000"+ + "\u0410\u0412\u0003K\u001e\u0000\u0411\u040f\u0001\u0000\u0000\u0000\u0411"+ + "\u0410\u0001\u0000\u0000\u0000\u0412\u0414\u0001\u0000\u0000\u0000\u0413"+ + "\u0415\u0003\u00e1i\u0000\u0414\u0413\u0001\u0000\u0000\u0000\u0415\u0416"+ + "\u0001\u0000\u0000\u0000\u0416\u0414\u0001\u0000\u0000\u0000\u0416\u0417"+ + "\u0001\u0000\u0000\u0000\u0417\u0419\u0001\u0000\u0000\u0000\u0418\u0407"+ + "\u0001\u0000\u0000\u0000\u0418\u0411\u0001\u0000\u0000\u0000\u0419\u00e4"+ + "\u0001\u0000\u0000\u0000\u041a\u041d\u0003\u00e3j\u0000\u041b\u041d\u0003"+ + 
"\u00abN\u0000\u041c\u041a\u0001\u0000\u0000\u0000\u041c\u041b\u0001\u0000"+ + "\u0000\u0000\u041d\u041e\u0001\u0000\u0000\u0000\u041e\u041c\u0001\u0000"+ + "\u0000\u0000\u041e\u041f\u0001\u0000\u0000\u0000\u041f\u00e6\u0001\u0000"+ + "\u0000\u0000\u0420\u0421\u00039\u0015\u0000\u0421\u0422\u0001\u0000\u0000"+ + "\u0000\u0422\u0423\u0006l\n\u0000\u0423\u00e8\u0001\u0000\u0000\u0000"+ + "\u0424\u0425\u0003;\u0016\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426"+ + "\u0427\u0006m\n\u0000\u0427\u00ea\u0001\u0000\u0000\u0000\u0428\u0429"+ + "\u0003=\u0017\u0000\u0429\u042a\u0001\u0000\u0000\u0000\u042a\u042b\u0006"+ + "n\n\u0000\u042b\u00ec\u0001\u0000\u0000\u0000\u042c\u042d\u0003?\u0018"+ + "\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e\u042f\u0006o\u000f\u0000"+ + "\u042f\u0430\u0006o\u000b\u0000\u0430\u00ee\u0001\u0000\u0000\u0000\u0431"+ + "\u0432\u0003a)\u0000\u0432\u0433\u0001\u0000\u0000\u0000\u0433\u0434\u0006"+ + "p\u0013\u0000\u0434\u00f0\u0001\u0000\u0000\u0000\u0435\u0436\u0003e+"+ + "\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0438\u0006q\u0012\u0000"+ + "\u0438\u00f2\u0001\u0000\u0000\u0000\u0439\u043a\u0003i-\u0000\u043a\u043b"+ + "\u0001\u0000\u0000\u0000\u043b\u043c\u0006r\u0016\u0000\u043c\u00f4\u0001"+ + "\u0000\u0000\u0000\u043d\u043e\u0007\f\u0000\u0000\u043e\u043f\u0007\u0002"+ + "\u0000\u0000\u043f\u00f6\u0001\u0000\u0000\u0000\u0440\u0441\u0003\u00e5"+ + "k\u0000\u0441\u0442\u0001\u0000\u0000\u0000\u0442\u0443\u0006t\u0017\u0000"+ + "\u0443\u00f8\u0001\u0000\u0000\u0000\u0444\u0445\u00039\u0015\u0000\u0445"+ + "\u0446\u0001\u0000\u0000\u0000\u0446\u0447\u0006u\n\u0000\u0447\u00fa"+ + "\u0001\u0000\u0000\u0000\u0448\u0449\u0003;\u0016\u0000\u0449\u044a\u0001"+ + "\u0000\u0000\u0000\u044a\u044b\u0006v\n\u0000\u044b\u00fc\u0001\u0000"+ + "\u0000\u0000\u044c\u044d\u0003=\u0017\u0000\u044d\u044e\u0001\u0000\u0000"+ + "\u0000\u044e\u044f\u0006w\n\u0000\u044f\u00fe\u0001\u0000\u0000\u0000"+ + 
"\u0450\u0451\u0003?\u0018\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452"+ + "\u0453\u0006x\u000f\u0000\u0453\u0454\u0006x\u000b\u0000\u0454\u0100\u0001"+ + "\u0000\u0000\u0000\u0455\u0456\u0003\u00a5K\u0000\u0456\u0457\u0001\u0000"+ + "\u0000\u0000\u0457\u0458\u0006y\r\u0000\u0458\u0459\u0006y\u0018\u0000"+ + "\u0459\u0102\u0001\u0000\u0000\u0000\u045a\u045b\u0007\u0007\u0000\u0000"+ + "\u045b\u045c\u0007\t\u0000\u0000\u045c\u045d\u0001\u0000\u0000\u0000\u045d"+ + "\u045e\u0006z\u0019\u0000\u045e\u0104\u0001\u0000\u0000\u0000\u045f\u0460"+ + "\u0007\u0013\u0000\u0000\u0460\u0461\u0007\u0001\u0000\u0000\u0461\u0462"+ + "\u0007\u0005\u0000\u0000\u0462\u0463\u0007\n\u0000\u0000\u0463\u0464\u0001"+ + "\u0000\u0000\u0000\u0464\u0465\u0006{\u0019\u0000\u0465\u0106\u0001\u0000"+ + "\u0000\u0000\u0466\u0467\b\"\u0000\u0000\u0467\u0108\u0001\u0000\u0000"+ + "\u0000\u0468\u046a\u0003\u0107|\u0000\u0469\u0468\u0001\u0000\u0000\u0000"+ + "\u046a\u046b\u0001\u0000\u0000\u0000\u046b\u0469\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0001\u0000\u0000\u0000"+ + "\u046d\u046e\u0003\u0141\u0099\u0000\u046e\u0470\u0001\u0000\u0000\u0000"+ + "\u046f\u0469\u0001\u0000\u0000\u0000\u046f\u0470\u0001\u0000\u0000\u0000"+ + "\u0470\u0472\u0001\u0000\u0000\u0000\u0471\u0473\u0003\u0107|\u0000\u0472"+ + "\u0471\u0001\u0000\u0000\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474"+ + "\u0472\u0001\u0000\u0000\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475"+ + "\u010a\u0001\u0000\u0000\u0000\u0476\u0477\u0003\u0109}\u0000\u0477\u0478"+ + "\u0001\u0000\u0000\u0000\u0478\u0479\u0006~\u001a\u0000\u0479\u010c\u0001"+ + "\u0000\u0000\u0000\u047a\u047b\u00039\u0015\u0000\u047b\u047c\u0001\u0000"+ + "\u0000\u0000\u047c\u047d\u0006\u007f\n\u0000\u047d\u010e\u0001\u0000\u0000"+ + "\u0000\u047e\u047f\u0003;\u0016\u0000\u047f\u0480\u0001\u0000\u0000\u0000"+ + "\u0480\u0481\u0006\u0080\n\u0000\u0481\u0110\u0001\u0000\u0000\u0000\u0482"+ + 
"\u0483\u0003=\u0017\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485"+ + "\u0006\u0081\n\u0000\u0485\u0112\u0001\u0000\u0000\u0000\u0486\u0487\u0003"+ + "?\u0018\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u0082"+ + "\u000f\u0000\u0489\u048a\u0006\u0082\u000b\u0000\u048a\u048b\u0006\u0082"+ + "\u000b\u0000\u048b\u0114\u0001\u0000\u0000\u0000\u048c\u048d\u0003a)\u0000"+ + "\u048d\u048e\u0001\u0000\u0000\u0000\u048e\u048f\u0006\u0083\u0013\u0000"+ + "\u048f\u0116\u0001\u0000\u0000\u0000\u0490\u0491\u0003e+\u0000\u0491\u0492"+ + "\u0001\u0000\u0000\u0000\u0492\u0493\u0006\u0084\u0012\u0000\u0493\u0118"+ + "\u0001\u0000\u0000\u0000\u0494\u0495\u0003i-\u0000\u0495\u0496\u0001\u0000"+ + "\u0000\u0000\u0496\u0497\u0006\u0085\u0016\u0000\u0497\u011a\u0001\u0000"+ + "\u0000\u0000\u0498\u0499\u0003\u0105{\u0000\u0499\u049a\u0001\u0000\u0000"+ + "\u0000\u049a\u049b\u0006\u0086\u001b\u0000\u049b\u011c\u0001\u0000\u0000"+ + "\u0000\u049c\u049d\u0003\u00e5k\u0000\u049d\u049e\u0001\u0000\u0000\u0000"+ + "\u049e\u049f\u0006\u0087\u0017\u0000\u049f\u011e\u0001\u0000\u0000\u0000"+ + "\u04a0\u04a1\u0003\u00adO\u0000\u04a1\u04a2\u0001\u0000\u0000\u0000\u04a2"+ + "\u04a3\u0006\u0088\u001c\u0000\u04a3\u0120\u0001\u0000\u0000\u0000\u04a4"+ + "\u04a5\u00039\u0015\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7"+ + "\u0006\u0089\n\u0000\u04a7\u0122\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003"+ + ";\u0016\u0000\u04a9\u04aa\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006\u008a"+ + "\n\u0000\u04ab\u0124\u0001\u0000\u0000\u0000\u04ac\u04ad\u0003=\u0017"+ + "\u0000\u04ad\u04ae\u0001\u0000\u0000\u0000\u04ae\u04af\u0006\u008b\n\u0000"+ + "\u04af\u0126\u0001\u0000\u0000\u0000\u04b0\u04b1\u0003?\u0018\u0000\u04b1"+ + "\u04b2\u0001\u0000\u0000\u0000\u04b2\u04b3\u0006\u008c\u000f\u0000\u04b3"+ + "\u04b4\u0006\u008c\u000b\u0000\u04b4\u0128\u0001\u0000\u0000\u0000\u04b5"+ + "\u04b6\u0003i-\u0000\u04b6\u04b7\u0001\u0000\u0000\u0000\u04b7\u04b8\u0006"+ + 
"\u008d\u0016\u0000\u04b8\u012a\u0001\u0000\u0000\u0000\u04b9\u04ba\u0003"+ + "\u00adO\u0000\u04ba\u04bb\u0001\u0000\u0000\u0000\u04bb\u04bc\u0006\u008e"+ + "\u001c\u0000\u04bc\u012c\u0001\u0000\u0000\u0000\u04bd\u04be\u0003\u00a9"+ + "M\u0000\u04be\u04bf\u0001\u0000\u0000\u0000\u04bf\u04c0\u0006\u008f\u001d"+ + "\u0000\u04c0\u012e\u0001\u0000\u0000\u0000\u04c1\u04c2\u00039\u0015\u0000"+ + "\u04c2\u04c3\u0001\u0000\u0000\u0000\u04c3\u04c4\u0006\u0090\n\u0000\u04c4"+ + "\u0130\u0001\u0000\u0000\u0000\u04c5\u04c6\u0003;\u0016\u0000\u04c6\u04c7"+ + "\u0001\u0000\u0000\u0000\u04c7\u04c8\u0006\u0091\n\u0000\u04c8\u0132\u0001"+ + "\u0000\u0000\u0000\u04c9\u04ca\u0003=\u0017\u0000\u04ca\u04cb\u0001\u0000"+ + "\u0000\u0000\u04cb\u04cc\u0006\u0092\n\u0000\u04cc\u0134\u0001\u0000\u0000"+ + "\u0000\u04cd\u04ce\u0003?\u0018\u0000\u04ce\u04cf\u0001\u0000\u0000\u0000"+ + "\u04cf\u04d0\u0006\u0093\u000f\u0000\u04d0\u04d1\u0006\u0093\u000b\u0000"+ + "\u04d1\u0136\u0001\u0000\u0000\u0000\u04d2\u04d3\u0007\u0001\u0000\u0000"+ + "\u04d3\u04d4\u0007\t\u0000\u0000\u04d4\u04d5\u0007\u000f\u0000\u0000\u04d5"+ + "\u04d6\u0007\u0007\u0000\u0000\u04d6\u0138\u0001\u0000\u0000\u0000\u04d7"+ + "\u04d8\u00039\u0015\u0000\u04d8\u04d9\u0001\u0000\u0000\u0000\u04d9\u04da"+ + "\u0006\u0095\n\u0000\u04da\u013a\u0001\u0000\u0000\u0000\u04db\u04dc\u0003"+ + ";\u0016\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000\u04dd\u04de\u0006\u0096"+ + "\n\u0000\u04de\u013c\u0001\u0000\u0000\u0000\u04df\u04e0\u0003=\u0017"+ + "\u0000\u04e0\u04e1\u0001\u0000\u0000\u0000\u04e1\u04e2\u0006\u0097\n\u0000"+ + "\u04e2\u013e\u0001\u0000\u0000\u0000\u04e3\u04e4\u0003\u00a7L\u0000\u04e4"+ + "\u04e5\u0001\u0000\u0000\u0000\u04e5\u04e6\u0006\u0098\u0010\u0000\u04e6"+ + "\u04e7\u0006\u0098\u000b\u0000\u04e7\u0140\u0001\u0000\u0000\u0000\u04e8"+ + "\u04e9\u0005:\u0000\u0000\u04e9\u0142\u0001\u0000\u0000\u0000\u04ea\u04f0"+ + "\u0003K\u001e\u0000\u04eb\u04f0\u0003A\u0019\u0000\u04ec\u04f0\u0003i"+ + 
"-\u0000\u04ed\u04f0\u0003C\u001a\u0000\u04ee\u04f0\u0003Q!\u0000\u04ef"+ + "\u04ea\u0001\u0000\u0000\u0000\u04ef\u04eb\u0001\u0000\u0000\u0000\u04ef"+ + "\u04ec\u0001\u0000\u0000\u0000\u04ef\u04ed\u0001\u0000\u0000\u0000\u04ef"+ + "\u04ee\u0001\u0000\u0000\u0000\u04f0\u04f1\u0001\u0000\u0000\u0000\u04f1"+ + "\u04ef\u0001\u0000\u0000\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2"+ + "\u0144\u0001\u0000\u0000\u0000\u04f3\u04f4\u00039\u0015\u0000\u04f4\u04f5"+ + "\u0001\u0000\u0000\u0000\u04f5\u04f6\u0006\u009b\n\u0000\u04f6\u0146\u0001"+ + "\u0000\u0000\u0000\u04f7\u04f8\u0003;\u0016\u0000\u04f8\u04f9\u0001\u0000"+ + "\u0000\u0000\u04f9\u04fa\u0006\u009c\n\u0000\u04fa\u0148\u0001\u0000\u0000"+ + "\u0000\u04fb\u04fc\u0003=\u0017\u0000\u04fc\u04fd\u0001\u0000\u0000\u0000"+ + "\u04fd\u04fe\u0006\u009d\n\u0000\u04fe\u014a\u0001\u0000\u0000\u0000\u04ff"+ + "\u0500\u0003?\u0018\u0000\u0500\u0501\u0001\u0000\u0000\u0000\u0501\u0502"+ + "\u0006\u009e\u000f\u0000\u0502\u0503\u0006\u009e\u000b\u0000\u0503\u014c"+ + "\u0001\u0000\u0000\u0000\u0504\u0505\u0003\u0141\u0099\u0000\u0505\u0506"+ + "\u0001\u0000\u0000\u0000\u0506\u0507\u0006\u009f\u0011\u0000\u0507\u014e"+ + "\u0001\u0000\u0000\u0000\u0508\u0509\u0003e+\u0000\u0509\u050a\u0001\u0000"+ + "\u0000\u0000\u050a\u050b\u0006\u00a0\u0012\u0000\u050b\u0150\u0001\u0000"+ + "\u0000\u0000\u050c\u050d\u0003i-\u0000\u050d\u050e\u0001\u0000\u0000\u0000"+ + "\u050e\u050f\u0006\u00a1\u0016\u0000\u050f\u0152\u0001\u0000\u0000\u0000"+ + "\u0510\u0511\u0003\u0103z\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512"+ + "\u0513\u0006\u00a2\u001e\u0000\u0513\u0514\u0006\u00a2\u001f\u0000\u0514"+ + "\u0154\u0001\u0000\u0000\u0000\u0515\u0516\u0003\u00cf`\u0000\u0516\u0517"+ + "\u0001\u0000\u0000\u0000\u0517\u0518\u0006\u00a3\u0014\u0000\u0518\u0156"+ + "\u0001\u0000\u0000\u0000\u0519\u051a\u0003U#\u0000\u051a\u051b\u0001\u0000"+ + "\u0000\u0000\u051b\u051c\u0006\u00a4\u0015\u0000\u051c\u0158\u0001\u0000"+ + 
"\u0000\u0000\u051d\u051e\u00039\u0015\u0000\u051e\u051f\u0001\u0000\u0000"+ + "\u0000\u051f\u0520\u0006\u00a5\n\u0000\u0520\u015a\u0001\u0000\u0000\u0000"+ + "\u0521\u0522\u0003;\u0016\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523"+ + "\u0524\u0006\u00a6\n\u0000\u0524\u015c\u0001\u0000\u0000\u0000\u0525\u0526"+ + "\u0003=\u0017\u0000\u0526\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006"+ + "\u00a7\n\u0000\u0528\u015e\u0001\u0000\u0000\u0000\u0529\u052a\u0003?"+ + "\u0018\u0000\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u052c\u0006\u00a8"+ + "\u000f\u0000\u052c\u052d\u0006\u00a8\u000b\u0000\u052d\u052e\u0006\u00a8"+ + "\u000b\u0000\u052e\u0160\u0001\u0000\u0000\u0000\u052f\u0530\u0003e+\u0000"+ + "\u0530\u0531\u0001\u0000\u0000\u0000\u0531\u0532\u0006\u00a9\u0012\u0000"+ + "\u0532\u0162\u0001\u0000\u0000\u0000\u0533\u0534\u0003i-\u0000\u0534\u0535"+ + "\u0001\u0000\u0000\u0000\u0535\u0536\u0006\u00aa\u0016\u0000\u0536\u0164"+ + "\u0001\u0000\u0000\u0000\u0537\u0538\u0003\u00e5k\u0000\u0538\u0539\u0001"+ + "\u0000\u0000\u0000\u0539\u053a\u0006\u00ab\u0017\u0000\u053a\u0166\u0001"+ + "\u0000\u0000\u0000\u053b\u053c\u00039\u0015\u0000\u053c\u053d\u0001\u0000"+ + "\u0000\u0000\u053d\u053e\u0006\u00ac\n\u0000\u053e\u0168\u0001\u0000\u0000"+ + "\u0000\u053f\u0540\u0003;\u0016\u0000\u0540\u0541\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0006\u00ad\n\u0000\u0542\u016a\u0001\u0000\u0000\u0000\u0543"+ + "\u0544\u0003=\u0017\u0000\u0544\u0545\u0001\u0000\u0000\u0000\u0545\u0546"+ + "\u0006\u00ae\n\u0000\u0546\u016c\u0001\u0000\u0000\u0000\u0547\u0548\u0003"+ + "?\u0018\u0000\u0548\u0549\u0001\u0000\u0000\u0000\u0549\u054a\u0006\u00af"+ + "\u000f\u0000\u054a\u054b\u0006\u00af\u000b\u0000\u054b\u016e\u0001\u0000"+ + "\u0000\u0000\u054c\u054d\u0003\u00cf`\u0000\u054d\u054e\u0001\u0000\u0000"+ + "\u0000\u054e\u054f\u0006\u00b0\u0014\u0000\u054f\u0550\u0006\u00b0\u000b"+ + "\u0000\u0550\u0551\u0006\u00b0 \u0000\u0551\u0170\u0001\u0000\u0000\u0000"+ + 
"\u0552\u0553\u0003U#\u0000\u0553\u0554\u0001\u0000\u0000\u0000\u0554\u0555"+ + "\u0006\u00b1\u0015\u0000\u0555\u0556\u0006\u00b1\u000b\u0000\u0556\u0557"+ + "\u0006\u00b1 \u0000\u0557\u0172\u0001\u0000\u0000\u0000\u0558\u0559\u0003"+ + "9\u0015\u0000\u0559\u055a\u0001\u0000\u0000\u0000\u055a\u055b\u0006\u00b2"+ + "\n\u0000\u055b\u0174\u0001\u0000\u0000\u0000\u055c\u055d\u0003;\u0016"+ + "\u0000\u055d\u055e\u0001\u0000\u0000\u0000\u055e\u055f\u0006\u00b3\n\u0000"+ + "\u055f\u0176\u0001\u0000\u0000\u0000\u0560\u0561\u0003=\u0017\u0000\u0561"+ + "\u0562\u0001\u0000\u0000\u0000\u0562\u0563\u0006\u00b4\n\u0000\u0563\u0178"+ + "\u0001\u0000\u0000\u0000\u0564\u0565\u0003\u0141\u0099\u0000\u0565\u0566"+ + "\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00b5\u0011\u0000\u0567\u0568"+ + "\u0006\u00b5\u000b\u0000\u0568\u0569\u0006\u00b5\t\u0000\u0569\u017a\u0001"+ + "\u0000\u0000\u0000\u056a\u056b\u0003e+\u0000\u056b\u056c\u0001\u0000\u0000"+ + "\u0000\u056c\u056d\u0006\u00b6\u0012\u0000\u056d\u056e\u0006\u00b6\u000b"+ + "\u0000\u056e\u056f\u0006\u00b6\t\u0000\u056f\u017c\u0001\u0000\u0000\u0000"+ + "\u0570\u0571\u00039\u0015\u0000\u0571\u0572\u0001\u0000\u0000\u0000\u0572"+ + "\u0573\u0006\u00b7\n\u0000\u0573\u017e\u0001\u0000\u0000\u0000\u0574\u0575"+ + "\u0003;\u0016\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0577\u0006"+ + "\u00b8\n\u0000\u0577\u0180\u0001\u0000\u0000\u0000\u0578\u0579\u0003="+ + "\u0017\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u057b\u0006\u00b9"+ + "\n\u0000\u057b\u0182\u0001\u0000\u0000\u0000\u057c\u057d\u0003\u00adO"+ + "\u0000\u057d\u057e\u0001\u0000\u0000\u0000\u057e\u057f\u0006\u00ba\u000b"+ + "\u0000\u057f\u0580\u0006\u00ba\u0000\u0000\u0580\u0581\u0006\u00ba\u001c"+ + "\u0000\u0581\u0184\u0001\u0000\u0000\u0000\u0582\u0583\u0003\u00a9M\u0000"+ + "\u0583\u0584\u0001\u0000\u0000\u0000\u0584\u0585\u0006\u00bb\u000b\u0000"+ + "\u0585\u0586\u0006\u00bb\u0000\u0000\u0586\u0587\u0006\u00bb\u001d\u0000"+ + 
"\u0587\u0186\u0001\u0000\u0000\u0000\u0588\u0589\u0003[&\u0000\u0589\u058a"+ + "\u0001\u0000\u0000\u0000\u058a\u058b\u0006\u00bc\u000b\u0000\u058b\u058c"+ + "\u0006\u00bc\u0000\u0000\u058c\u058d\u0006\u00bc!\u0000\u058d\u0188\u0001"+ + "\u0000\u0000\u0000\u058e\u058f\u0003?\u0018\u0000\u058f\u0590\u0001\u0000"+ + "\u0000\u0000\u0590\u0591\u0006\u00bd\u000f\u0000\u0591\u0592\u0006\u00bd"+ + "\u000b\u0000\u0592\u018a\u0001\u0000\u0000\u0000A\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u023c\u0246\u024a\u024d"+ + "\u0256\u0258\u0263\u0276\u027b\u0284\u028b\u0290\u0292\u029d\u02a5\u02a8"+ + "\u02aa\u02af\u02b4\u02ba\u02c1\u02c6\u02cc\u02cf\u02d7\u02db\u035a\u035f"+ + "\u0366\u0368\u0378\u037d\u0382\u0384\u038a\u03d7\u03dc\u0403\u0407\u040c"+ + "\u0411\u0416\u0418\u041c\u041e\u046b\u046f\u0474\u04ef\u04f1\"\u0005\u0001"+ + "\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0002\u0000\u0005\u0003"+ + "\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0005\u000b\u0000"+ + "\u0005\r\u0000\u0000\u0001\u0000\u0004\u0000\u0000\u0007\u0013\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u0019\u0000\u0007B\u0000\u0007h\u0000"+ + "\u0007\"\u0000\u0007 \u0000\u0007L\u0000\u0007\u001a\u0000\u0007$\u0000"+ + "\u0007P\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007Z\u0000\u0007Y\u0000"+ + "\u0007D\u0000\u0007C\u0000\u0007X\u0000\u0005\f\u0000\u0005\u000e\u0000"+ + "\u0007\u001d\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index f7eed3e9be79..ae34d683403f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -9,7 +9,6 @@ null 'grok' 'keep' 
'limit' -'meta' 'mv_expand' 'rename' 'row' @@ -104,10 +103,6 @@ null null null null -'functions' -null -null -null ':' null null @@ -137,7 +132,6 @@ FROM GROK KEEP LIMIT -META MV_EXPAND RENAME ROW @@ -232,10 +226,6 @@ INFO SHOW_LINE_COMMENT SHOW_MULTILINE_COMMENT SHOW_WS -FUNCTIONS -META_LINE_COMMENT -META_MULTILINE_COMMENT -META_WS COLON SETTING SETTING_LINE_COMMENT @@ -309,7 +299,6 @@ comparisonOperator explainCommand subqueryExpression showCommand -metaCommand enrichCommand enrichWithClause lookupCommand @@ -317,4 +306,4 @@ inlinestatsCommand atn: -[4, 1, 125, 578, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 128, 8, 1, 10, 1, 12, 1, 131, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 140, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 158, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 170, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 177, 8, 5, 10, 5, 12, 5, 180, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 187, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 193, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 201, 8, 5, 10, 5, 12, 5, 204, 9, 5, 1, 6, 1, 6, 3, 6, 208, 8, 6, 1, 6, 
1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 215, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 220, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 231, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 245, 8, 9, 10, 9, 12, 9, 248, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 263, 8, 10, 10, 10, 12, 10, 266, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 274, 8, 11, 10, 11, 12, 11, 277, 9, 11, 3, 11, 279, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 291, 8, 14, 10, 14, 12, 14, 294, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 301, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 307, 8, 16, 10, 16, 12, 16, 310, 9, 16, 1, 16, 3, 16, 313, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 320, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 328, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 334, 8, 21, 10, 21, 12, 21, 337, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 347, 8, 23, 10, 23, 12, 23, 350, 9, 23, 1, 23, 3, 23, 353, 8, 23, 1, 23, 1, 23, 3, 23, 357, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 364, 8, 25, 1, 25, 1, 25, 3, 25, 368, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 373, 8, 26, 10, 26, 12, 26, 376, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 381, 8, 27, 10, 27, 12, 27, 384, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 389, 8, 28, 10, 28, 12, 28, 392, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 411, 8, 31, 10, 31, 12, 31, 414, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 422, 8, 31, 10, 31, 12, 31, 425, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 433, 8, 31, 10, 31, 12, 31, 436, 9, 31, 1, 31, 1, 31, 3, 31, 440, 8, 31, 1, 32, 1, 32, 3, 32, 444, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 453, 8, 34, 10, 34, 12, 34, 456, 9, 34, 1, 35, 1, 35, 3, 35, 460, 8, 35, 1, 35, 1, 35, 3, 35, 464, 8, 
35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 476, 8, 38, 10, 38, 12, 38, 479, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 489, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 501, 8, 43, 10, 43, 12, 43, 504, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 514, 8, 46, 1, 47, 3, 47, 517, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 522, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 3, 55, 547, 8, 55, 1, 55, 1, 55, 1, 55, 1, 55, 5, 55, 553, 8, 55, 10, 55, 12, 55, 556, 9, 55, 3, 55, 558, 8, 55, 1, 56, 1, 56, 1, 56, 3, 56, 563, 8, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 576, 8, 58, 1, 58, 0, 4, 2, 10, 18, 20, 59, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 27, 27, 77, 77, 1, 0, 68, 69, 2, 0, 32, 32, 36, 36, 2, 0, 39, 39, 42, 42, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 603, 0, 118, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 4, 139, 1, 0, 0, 0, 6, 157, 1, 0, 0, 0, 8, 159, 1, 0, 0, 0, 10, 192, 1, 0, 0, 0, 12, 219, 1, 0, 0, 0, 14, 221, 1, 0, 0, 0, 16, 230, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 257, 1, 0, 0, 0, 22, 267, 1, 0, 0, 0, 24, 282, 1, 0, 0, 0, 26, 284, 1, 0, 0, 0, 28, 287, 1, 0, 0, 0, 30, 300, 1, 0, 0, 0, 32, 302, 1, 0, 0, 0, 34, 319, 1, 0, 0, 0, 36, 321, 1, 0, 0, 0, 38, 323, 1, 0, 0, 0, 40, 327, 1, 0, 0, 0, 42, 329, 1, 0, 0, 0, 44, 338, 1, 0, 0, 0, 46, 342, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 361, 1, 0, 0, 0, 52, 369, 1, 0, 0, 0, 54, 377, 1, 0, 0, 0, 56, 385, 1, 0, 0, 0, 58, 393, 1, 0, 0, 0, 60, 395, 1, 0, 0, 0, 62, 439, 1, 0, 0, 0, 64, 443, 1, 0, 0, 0, 66, 445, 1, 
0, 0, 0, 68, 448, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 465, 1, 0, 0, 0, 74, 468, 1, 0, 0, 0, 76, 471, 1, 0, 0, 0, 78, 480, 1, 0, 0, 0, 80, 484, 1, 0, 0, 0, 82, 490, 1, 0, 0, 0, 84, 494, 1, 0, 0, 0, 86, 497, 1, 0, 0, 0, 88, 505, 1, 0, 0, 0, 90, 509, 1, 0, 0, 0, 92, 513, 1, 0, 0, 0, 94, 516, 1, 0, 0, 0, 96, 521, 1, 0, 0, 0, 98, 525, 1, 0, 0, 0, 100, 527, 1, 0, 0, 0, 102, 529, 1, 0, 0, 0, 104, 532, 1, 0, 0, 0, 106, 536, 1, 0, 0, 0, 108, 539, 1, 0, 0, 0, 110, 542, 1, 0, 0, 0, 112, 562, 1, 0, 0, 0, 114, 566, 1, 0, 0, 0, 116, 571, 1, 0, 0, 0, 118, 119, 3, 2, 1, 0, 119, 120, 5, 0, 0, 1, 120, 1, 1, 0, 0, 0, 121, 122, 6, 1, -1, 0, 122, 123, 3, 4, 2, 0, 123, 129, 1, 0, 0, 0, 124, 125, 10, 1, 0, 0, 125, 126, 5, 26, 0, 0, 126, 128, 3, 6, 3, 0, 127, 124, 1, 0, 0, 0, 128, 131, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 129, 130, 1, 0, 0, 0, 130, 3, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 132, 140, 3, 102, 51, 0, 133, 140, 3, 32, 16, 0, 134, 140, 3, 108, 54, 0, 135, 140, 3, 26, 13, 0, 136, 140, 3, 106, 53, 0, 137, 138, 4, 2, 1, 0, 138, 140, 3, 46, 23, 0, 139, 132, 1, 0, 0, 0, 139, 133, 1, 0, 0, 0, 139, 134, 1, 0, 0, 0, 139, 135, 1, 0, 0, 0, 139, 136, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 140, 5, 1, 0, 0, 0, 141, 158, 3, 48, 24, 0, 142, 158, 3, 8, 4, 0, 143, 158, 3, 72, 36, 0, 144, 158, 3, 66, 33, 0, 145, 158, 3, 50, 25, 0, 146, 158, 3, 68, 34, 0, 147, 158, 3, 74, 37, 0, 148, 158, 3, 76, 38, 0, 149, 158, 3, 80, 40, 0, 150, 158, 3, 82, 41, 0, 151, 158, 3, 110, 55, 0, 152, 158, 3, 84, 42, 0, 153, 154, 4, 3, 2, 0, 154, 158, 3, 116, 58, 0, 155, 156, 4, 3, 3, 0, 156, 158, 3, 114, 57, 0, 157, 141, 1, 0, 0, 0, 157, 142, 1, 0, 0, 0, 157, 143, 1, 0, 0, 0, 157, 144, 1, 0, 0, 0, 157, 145, 1, 0, 0, 0, 157, 146, 1, 0, 0, 0, 157, 147, 1, 0, 0, 0, 157, 148, 1, 0, 0, 0, 157, 149, 1, 0, 0, 0, 157, 150, 1, 0, 0, 0, 157, 151, 1, 0, 0, 0, 157, 152, 1, 0, 0, 0, 157, 153, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 7, 1, 0, 0, 0, 159, 160, 5, 17, 0, 0, 160, 161, 3, 10, 5, 0, 161, 9, 1, 0, 0, 0, 162, 163, 6, 5, -1, 0, 163, 
164, 5, 45, 0, 0, 164, 193, 3, 10, 5, 8, 165, 193, 3, 16, 8, 0, 166, 193, 3, 12, 6, 0, 167, 169, 3, 16, 8, 0, 168, 170, 5, 45, 0, 0, 169, 168, 1, 0, 0, 0, 169, 170, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172, 5, 40, 0, 0, 172, 173, 5, 44, 0, 0, 173, 178, 3, 16, 8, 0, 174, 175, 5, 35, 0, 0, 175, 177, 3, 16, 8, 0, 176, 174, 1, 0, 0, 0, 177, 180, 1, 0, 0, 0, 178, 176, 1, 0, 0, 0, 178, 179, 1, 0, 0, 0, 179, 181, 1, 0, 0, 0, 180, 178, 1, 0, 0, 0, 181, 182, 5, 51, 0, 0, 182, 193, 1, 0, 0, 0, 183, 184, 3, 16, 8, 0, 184, 186, 5, 41, 0, 0, 185, 187, 5, 45, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 46, 0, 0, 189, 193, 1, 0, 0, 0, 190, 191, 4, 5, 4, 0, 191, 193, 3, 14, 7, 0, 192, 162, 1, 0, 0, 0, 192, 165, 1, 0, 0, 0, 192, 166, 1, 0, 0, 0, 192, 167, 1, 0, 0, 0, 192, 183, 1, 0, 0, 0, 192, 190, 1, 0, 0, 0, 193, 202, 1, 0, 0, 0, 194, 195, 10, 5, 0, 0, 195, 196, 5, 31, 0, 0, 196, 201, 3, 10, 5, 6, 197, 198, 10, 4, 0, 0, 198, 199, 5, 48, 0, 0, 199, 201, 3, 10, 5, 5, 200, 194, 1, 0, 0, 0, 200, 197, 1, 0, 0, 0, 201, 204, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 11, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 207, 3, 16, 8, 0, 206, 208, 5, 45, 0, 0, 207, 206, 1, 0, 0, 0, 207, 208, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 210, 5, 43, 0, 0, 210, 211, 3, 98, 49, 0, 211, 220, 1, 0, 0, 0, 212, 214, 3, 16, 8, 0, 213, 215, 5, 45, 0, 0, 214, 213, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 216, 1, 0, 0, 0, 216, 217, 5, 50, 0, 0, 217, 218, 3, 98, 49, 0, 218, 220, 1, 0, 0, 0, 219, 205, 1, 0, 0, 0, 219, 212, 1, 0, 0, 0, 220, 13, 1, 0, 0, 0, 221, 222, 3, 16, 8, 0, 222, 223, 5, 20, 0, 0, 223, 224, 3, 98, 49, 0, 224, 15, 1, 0, 0, 0, 225, 231, 3, 18, 9, 0, 226, 227, 3, 18, 9, 0, 227, 228, 3, 100, 50, 0, 228, 229, 3, 18, 9, 0, 229, 231, 1, 0, 0, 0, 230, 225, 1, 0, 0, 0, 230, 226, 1, 0, 0, 0, 231, 17, 1, 0, 0, 0, 232, 233, 6, 9, -1, 0, 233, 237, 3, 20, 10, 0, 234, 235, 7, 0, 0, 0, 235, 237, 3, 18, 9, 3, 236, 232, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 
237, 246, 1, 0, 0, 0, 238, 239, 10, 2, 0, 0, 239, 240, 7, 1, 0, 0, 240, 245, 3, 18, 9, 3, 241, 242, 10, 1, 0, 0, 242, 243, 7, 0, 0, 0, 243, 245, 3, 18, 9, 2, 244, 238, 1, 0, 0, 0, 244, 241, 1, 0, 0, 0, 245, 248, 1, 0, 0, 0, 246, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 19, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 249, 250, 6, 10, -1, 0, 250, 258, 3, 62, 31, 0, 251, 258, 3, 52, 26, 0, 252, 258, 3, 22, 11, 0, 253, 254, 5, 44, 0, 0, 254, 255, 3, 10, 5, 0, 255, 256, 5, 51, 0, 0, 256, 258, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 251, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 264, 1, 0, 0, 0, 259, 260, 10, 1, 0, 0, 260, 261, 5, 34, 0, 0, 261, 263, 3, 24, 12, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 21, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 3, 58, 29, 0, 268, 278, 5, 44, 0, 0, 269, 279, 5, 62, 0, 0, 270, 275, 3, 10, 5, 0, 271, 272, 5, 35, 0, 0, 272, 274, 3, 10, 5, 0, 273, 271, 1, 0, 0, 0, 274, 277, 1, 0, 0, 0, 275, 273, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0, 276, 279, 1, 0, 0, 0, 277, 275, 1, 0, 0, 0, 278, 269, 1, 0, 0, 0, 278, 270, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 281, 5, 51, 0, 0, 281, 23, 1, 0, 0, 0, 282, 283, 3, 58, 29, 0, 283, 25, 1, 0, 0, 0, 284, 285, 5, 13, 0, 0, 285, 286, 3, 28, 14, 0, 286, 27, 1, 0, 0, 0, 287, 292, 3, 30, 15, 0, 288, 289, 5, 35, 0, 0, 289, 291, 3, 30, 15, 0, 290, 288, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 29, 1, 0, 0, 0, 294, 292, 1, 0, 0, 0, 295, 301, 3, 10, 5, 0, 296, 297, 3, 52, 26, 0, 297, 298, 5, 33, 0, 0, 298, 299, 3, 10, 5, 0, 299, 301, 1, 0, 0, 0, 300, 295, 1, 0, 0, 0, 300, 296, 1, 0, 0, 0, 301, 31, 1, 0, 0, 0, 302, 303, 5, 6, 0, 0, 303, 308, 3, 34, 17, 0, 304, 305, 5, 35, 0, 0, 305, 307, 3, 34, 17, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 312, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 313, 3, 40, 20, 0, 312, 311, 1, 0, 0, 0, 312, 313, 1, 0, 0, 0, 
313, 33, 1, 0, 0, 0, 314, 315, 3, 36, 18, 0, 315, 316, 5, 109, 0, 0, 316, 317, 3, 38, 19, 0, 317, 320, 1, 0, 0, 0, 318, 320, 3, 38, 19, 0, 319, 314, 1, 0, 0, 0, 319, 318, 1, 0, 0, 0, 320, 35, 1, 0, 0, 0, 321, 322, 5, 77, 0, 0, 322, 37, 1, 0, 0, 0, 323, 324, 7, 2, 0, 0, 324, 39, 1, 0, 0, 0, 325, 328, 3, 42, 21, 0, 326, 328, 3, 44, 22, 0, 327, 325, 1, 0, 0, 0, 327, 326, 1, 0, 0, 0, 328, 41, 1, 0, 0, 0, 329, 330, 5, 76, 0, 0, 330, 335, 5, 77, 0, 0, 331, 332, 5, 35, 0, 0, 332, 334, 5, 77, 0, 0, 333, 331, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 333, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 43, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 338, 339, 5, 66, 0, 0, 339, 340, 3, 42, 21, 0, 340, 341, 5, 67, 0, 0, 341, 45, 1, 0, 0, 0, 342, 343, 5, 21, 0, 0, 343, 348, 3, 34, 17, 0, 344, 345, 5, 35, 0, 0, 345, 347, 3, 34, 17, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 28, 14, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 30, 0, 0, 355, 357, 3, 28, 14, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 4, 0, 0, 359, 360, 3, 28, 14, 0, 360, 49, 1, 0, 0, 0, 361, 363, 5, 16, 0, 0, 362, 364, 3, 28, 14, 0, 363, 362, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 366, 5, 30, 0, 0, 366, 368, 3, 28, 14, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 51, 1, 0, 0, 0, 369, 374, 3, 58, 29, 0, 370, 371, 5, 37, 0, 0, 371, 373, 3, 58, 29, 0, 372, 370, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 53, 1, 0, 0, 0, 376, 374, 1, 0, 0, 0, 377, 382, 3, 60, 30, 0, 378, 379, 5, 37, 0, 0, 379, 381, 3, 60, 30, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 55, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 390, 3, 54, 27, 0, 386, 387, 5, 35, 0, 0, 387, 389, 3, 54, 27, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 
0, 391, 57, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 394, 7, 3, 0, 0, 394, 59, 1, 0, 0, 0, 395, 396, 5, 81, 0, 0, 396, 61, 1, 0, 0, 0, 397, 440, 5, 46, 0, 0, 398, 399, 3, 96, 48, 0, 399, 400, 5, 68, 0, 0, 400, 440, 1, 0, 0, 0, 401, 440, 3, 94, 47, 0, 402, 440, 3, 96, 48, 0, 403, 440, 3, 90, 45, 0, 404, 440, 3, 64, 32, 0, 405, 440, 3, 98, 49, 0, 406, 407, 5, 66, 0, 0, 407, 412, 3, 92, 46, 0, 408, 409, 5, 35, 0, 0, 409, 411, 3, 92, 46, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 415, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 5, 67, 0, 0, 416, 440, 1, 0, 0, 0, 417, 418, 5, 66, 0, 0, 418, 423, 3, 90, 45, 0, 419, 420, 5, 35, 0, 0, 420, 422, 3, 90, 45, 0, 421, 419, 1, 0, 0, 0, 422, 425, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 426, 1, 0, 0, 0, 425, 423, 1, 0, 0, 0, 426, 427, 5, 67, 0, 0, 427, 440, 1, 0, 0, 0, 428, 429, 5, 66, 0, 0, 429, 434, 3, 98, 49, 0, 430, 431, 5, 35, 0, 0, 431, 433, 3, 98, 49, 0, 432, 430, 1, 0, 0, 0, 433, 436, 1, 0, 0, 0, 434, 432, 1, 0, 0, 0, 434, 435, 1, 0, 0, 0, 435, 437, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 437, 438, 5, 67, 0, 0, 438, 440, 1, 0, 0, 0, 439, 397, 1, 0, 0, 0, 439, 398, 1, 0, 0, 0, 439, 401, 1, 0, 0, 0, 439, 402, 1, 0, 0, 0, 439, 403, 1, 0, 0, 0, 439, 404, 1, 0, 0, 0, 439, 405, 1, 0, 0, 0, 439, 406, 1, 0, 0, 0, 439, 417, 1, 0, 0, 0, 439, 428, 1, 0, 0, 0, 440, 63, 1, 0, 0, 0, 441, 444, 5, 49, 0, 0, 442, 444, 5, 65, 0, 0, 443, 441, 1, 0, 0, 0, 443, 442, 1, 0, 0, 0, 444, 65, 1, 0, 0, 0, 445, 446, 5, 9, 0, 0, 446, 447, 5, 28, 0, 0, 447, 67, 1, 0, 0, 0, 448, 449, 5, 15, 0, 0, 449, 454, 3, 70, 35, 0, 450, 451, 5, 35, 0, 0, 451, 453, 3, 70, 35, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 459, 3, 10, 5, 0, 458, 460, 7, 4, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 1, 0, 0, 0, 460, 463, 1, 0, 0, 0, 461, 462, 5, 47, 0, 0, 462, 464, 7, 5, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 
0, 464, 71, 1, 0, 0, 0, 465, 466, 5, 8, 0, 0, 466, 467, 3, 56, 28, 0, 467, 73, 1, 0, 0, 0, 468, 469, 5, 2, 0, 0, 469, 470, 3, 56, 28, 0, 470, 75, 1, 0, 0, 0, 471, 472, 5, 12, 0, 0, 472, 477, 3, 78, 39, 0, 473, 474, 5, 35, 0, 0, 474, 476, 3, 78, 39, 0, 475, 473, 1, 0, 0, 0, 476, 479, 1, 0, 0, 0, 477, 475, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 77, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 480, 481, 3, 54, 27, 0, 481, 482, 5, 85, 0, 0, 482, 483, 3, 54, 27, 0, 483, 79, 1, 0, 0, 0, 484, 485, 5, 1, 0, 0, 485, 486, 3, 20, 10, 0, 486, 488, 3, 98, 49, 0, 487, 489, 3, 86, 43, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 81, 1, 0, 0, 0, 490, 491, 5, 7, 0, 0, 491, 492, 3, 20, 10, 0, 492, 493, 3, 98, 49, 0, 493, 83, 1, 0, 0, 0, 494, 495, 5, 11, 0, 0, 495, 496, 3, 52, 26, 0, 496, 85, 1, 0, 0, 0, 497, 502, 3, 88, 44, 0, 498, 499, 5, 35, 0, 0, 499, 501, 3, 88, 44, 0, 500, 498, 1, 0, 0, 0, 501, 504, 1, 0, 0, 0, 502, 500, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 87, 1, 0, 0, 0, 504, 502, 1, 0, 0, 0, 505, 506, 3, 58, 29, 0, 506, 507, 5, 33, 0, 0, 507, 508, 3, 62, 31, 0, 508, 89, 1, 0, 0, 0, 509, 510, 7, 6, 0, 0, 510, 91, 1, 0, 0, 0, 511, 514, 3, 94, 47, 0, 512, 514, 3, 96, 48, 0, 513, 511, 1, 0, 0, 0, 513, 512, 1, 0, 0, 0, 514, 93, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 29, 0, 0, 519, 95, 1, 0, 0, 0, 520, 522, 7, 0, 0, 0, 521, 520, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 5, 28, 0, 0, 524, 97, 1, 0, 0, 0, 525, 526, 5, 27, 0, 0, 526, 99, 1, 0, 0, 0, 527, 528, 7, 7, 0, 0, 528, 101, 1, 0, 0, 0, 529, 530, 5, 5, 0, 0, 530, 531, 3, 104, 52, 0, 531, 103, 1, 0, 0, 0, 532, 533, 5, 66, 0, 0, 533, 534, 3, 2, 1, 0, 534, 535, 5, 67, 0, 0, 535, 105, 1, 0, 0, 0, 536, 537, 5, 14, 0, 0, 537, 538, 5, 101, 0, 0, 538, 107, 1, 0, 0, 0, 539, 540, 5, 10, 0, 0, 540, 541, 5, 105, 0, 0, 541, 109, 1, 0, 0, 0, 542, 543, 5, 3, 0, 0, 543, 546, 5, 91, 0, 0, 544, 545, 5, 89, 0, 0, 545, 547, 3, 54, 27, 0, 546, 544, 1, 0, 
0, 0, 546, 547, 1, 0, 0, 0, 547, 557, 1, 0, 0, 0, 548, 549, 5, 90, 0, 0, 549, 554, 3, 112, 56, 0, 550, 551, 5, 35, 0, 0, 551, 553, 3, 112, 56, 0, 552, 550, 1, 0, 0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 558, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 548, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 111, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 561, 5, 33, 0, 0, 561, 563, 1, 0, 0, 0, 562, 559, 1, 0, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 565, 3, 54, 27, 0, 565, 113, 1, 0, 0, 0, 566, 567, 5, 19, 0, 0, 567, 568, 3, 34, 17, 0, 568, 569, 5, 89, 0, 0, 569, 570, 3, 56, 28, 0, 570, 115, 1, 0, 0, 0, 571, 572, 5, 18, 0, 0, 572, 575, 3, 28, 14, 0, 573, 574, 5, 30, 0, 0, 574, 576, 3, 28, 14, 0, 575, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 117, 1, 0, 0, 0, 54, 129, 139, 157, 169, 178, 186, 192, 200, 202, 207, 214, 219, 230, 236, 244, 246, 257, 264, 275, 278, 292, 300, 308, 312, 319, 327, 335, 348, 352, 356, 363, 367, 374, 382, 390, 412, 423, 434, 439, 443, 454, 459, 463, 477, 488, 502, 513, 516, 521, 546, 554, 557, 562, 575] \ No newline at end of file +[4, 1, 120, 572, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 126, 8, 1, 10, 1, 12, 1, 
129, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 137, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 155, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 167, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 174, 8, 5, 10, 5, 12, 5, 177, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 184, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 190, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 198, 8, 5, 10, 5, 12, 5, 201, 9, 5, 1, 6, 1, 6, 3, 6, 205, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 212, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 217, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 228, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 234, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 255, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 260, 8, 10, 10, 10, 12, 10, 263, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 271, 8, 11, 10, 11, 12, 11, 274, 9, 11, 3, 11, 276, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 5, 14, 288, 8, 14, 10, 14, 12, 14, 291, 9, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 298, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 304, 8, 16, 10, 16, 12, 16, 307, 9, 16, 1, 16, 3, 16, 310, 8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 3, 17, 317, 8, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 3, 20, 325, 8, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 331, 8, 21, 10, 21, 12, 21, 334, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 344, 8, 23, 10, 23, 12, 23, 347, 9, 23, 1, 23, 3, 23, 350, 8, 23, 1, 23, 1, 23, 3, 23, 354, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 361, 8, 25, 1, 25, 1, 25, 3, 25, 365, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 370, 8, 26, 10, 26, 12, 26, 373, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 378, 8, 27, 10, 27, 12, 27, 381, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 386, 8, 28, 10, 28, 12, 28, 389, 9, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 
31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 408, 8, 31, 10, 31, 12, 31, 411, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 419, 8, 31, 10, 31, 12, 31, 422, 9, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 430, 8, 31, 10, 31, 12, 31, 433, 9, 31, 1, 31, 1, 31, 3, 31, 437, 8, 31, 1, 32, 1, 32, 3, 32, 441, 8, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 450, 8, 34, 10, 34, 12, 34, 453, 9, 34, 1, 35, 1, 35, 3, 35, 457, 8, 35, 1, 35, 1, 35, 3, 35, 461, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 473, 8, 38, 10, 38, 12, 38, 476, 9, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 486, 8, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 5, 43, 498, 8, 43, 10, 43, 12, 43, 501, 9, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 511, 8, 46, 1, 47, 3, 47, 514, 8, 47, 1, 47, 1, 47, 1, 48, 3, 48, 519, 8, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 3, 54, 541, 8, 54, 1, 54, 1, 54, 1, 54, 1, 54, 5, 54, 547, 8, 54, 10, 54, 12, 54, 550, 9, 54, 3, 54, 552, 8, 54, 1, 55, 1, 55, 1, 55, 3, 55, 557, 8, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 3, 57, 570, 8, 57, 1, 57, 0, 4, 2, 10, 18, 20, 58, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 0, 8, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 26, 26, 76, 76, 1, 0, 67, 68, 2, 0, 31, 31, 35, 35, 2, 0, 38, 38, 41, 41, 2, 0, 37, 37, 51, 51, 2, 0, 52, 52, 54, 58, 597, 0, 116, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 4, 136, 1, 0, 0, 0, 6, 154, 1, 0, 0, 0, 8, 156, 1, 0, 0, 0, 10, 189, 1, 0, 0, 0, 12, 216, 1, 0, 0, 0, 14, 218, 1, 0, 0, 0, 16, 227, 1, 0, 0, 0, 18, 233, 1, 
0, 0, 0, 20, 254, 1, 0, 0, 0, 22, 264, 1, 0, 0, 0, 24, 279, 1, 0, 0, 0, 26, 281, 1, 0, 0, 0, 28, 284, 1, 0, 0, 0, 30, 297, 1, 0, 0, 0, 32, 299, 1, 0, 0, 0, 34, 316, 1, 0, 0, 0, 36, 318, 1, 0, 0, 0, 38, 320, 1, 0, 0, 0, 40, 324, 1, 0, 0, 0, 42, 326, 1, 0, 0, 0, 44, 335, 1, 0, 0, 0, 46, 339, 1, 0, 0, 0, 48, 355, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 366, 1, 0, 0, 0, 54, 374, 1, 0, 0, 0, 56, 382, 1, 0, 0, 0, 58, 390, 1, 0, 0, 0, 60, 392, 1, 0, 0, 0, 62, 436, 1, 0, 0, 0, 64, 440, 1, 0, 0, 0, 66, 442, 1, 0, 0, 0, 68, 445, 1, 0, 0, 0, 70, 454, 1, 0, 0, 0, 72, 462, 1, 0, 0, 0, 74, 465, 1, 0, 0, 0, 76, 468, 1, 0, 0, 0, 78, 477, 1, 0, 0, 0, 80, 481, 1, 0, 0, 0, 82, 487, 1, 0, 0, 0, 84, 491, 1, 0, 0, 0, 86, 494, 1, 0, 0, 0, 88, 502, 1, 0, 0, 0, 90, 506, 1, 0, 0, 0, 92, 510, 1, 0, 0, 0, 94, 513, 1, 0, 0, 0, 96, 518, 1, 0, 0, 0, 98, 522, 1, 0, 0, 0, 100, 524, 1, 0, 0, 0, 102, 526, 1, 0, 0, 0, 104, 529, 1, 0, 0, 0, 106, 533, 1, 0, 0, 0, 108, 536, 1, 0, 0, 0, 110, 556, 1, 0, 0, 0, 112, 560, 1, 0, 0, 0, 114, 565, 1, 0, 0, 0, 116, 117, 3, 2, 1, 0, 117, 118, 5, 0, 0, 1, 118, 1, 1, 0, 0, 0, 119, 120, 6, 1, -1, 0, 120, 121, 3, 4, 2, 0, 121, 127, 1, 0, 0, 0, 122, 123, 10, 1, 0, 0, 123, 124, 5, 25, 0, 0, 124, 126, 3, 6, 3, 0, 125, 122, 1, 0, 0, 0, 126, 129, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 128, 1, 0, 0, 0, 128, 3, 1, 0, 0, 0, 129, 127, 1, 0, 0, 0, 130, 137, 3, 102, 51, 0, 131, 137, 3, 32, 16, 0, 132, 137, 3, 26, 13, 0, 133, 137, 3, 106, 53, 0, 134, 135, 4, 2, 1, 0, 135, 137, 3, 46, 23, 0, 136, 130, 1, 0, 0, 0, 136, 131, 1, 0, 0, 0, 136, 132, 1, 0, 0, 0, 136, 133, 1, 0, 0, 0, 136, 134, 1, 0, 0, 0, 137, 5, 1, 0, 0, 0, 138, 155, 3, 48, 24, 0, 139, 155, 3, 8, 4, 0, 140, 155, 3, 72, 36, 0, 141, 155, 3, 66, 33, 0, 142, 155, 3, 50, 25, 0, 143, 155, 3, 68, 34, 0, 144, 155, 3, 74, 37, 0, 145, 155, 3, 76, 38, 0, 146, 155, 3, 80, 40, 0, 147, 155, 3, 82, 41, 0, 148, 155, 3, 108, 54, 0, 149, 155, 3, 84, 42, 0, 150, 151, 4, 3, 2, 0, 151, 155, 3, 114, 57, 0, 152, 153, 4, 3, 3, 0, 153, 155, 3, 
112, 56, 0, 154, 138, 1, 0, 0, 0, 154, 139, 1, 0, 0, 0, 154, 140, 1, 0, 0, 0, 154, 141, 1, 0, 0, 0, 154, 142, 1, 0, 0, 0, 154, 143, 1, 0, 0, 0, 154, 144, 1, 0, 0, 0, 154, 145, 1, 0, 0, 0, 154, 146, 1, 0, 0, 0, 154, 147, 1, 0, 0, 0, 154, 148, 1, 0, 0, 0, 154, 149, 1, 0, 0, 0, 154, 150, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 7, 1, 0, 0, 0, 156, 157, 5, 16, 0, 0, 157, 158, 3, 10, 5, 0, 158, 9, 1, 0, 0, 0, 159, 160, 6, 5, -1, 0, 160, 161, 5, 44, 0, 0, 161, 190, 3, 10, 5, 8, 162, 190, 3, 16, 8, 0, 163, 190, 3, 12, 6, 0, 164, 166, 3, 16, 8, 0, 165, 167, 5, 44, 0, 0, 166, 165, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 168, 1, 0, 0, 0, 168, 169, 5, 39, 0, 0, 169, 170, 5, 43, 0, 0, 170, 175, 3, 16, 8, 0, 171, 172, 5, 34, 0, 0, 172, 174, 3, 16, 8, 0, 173, 171, 1, 0, 0, 0, 174, 177, 1, 0, 0, 0, 175, 173, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 178, 1, 0, 0, 0, 177, 175, 1, 0, 0, 0, 178, 179, 5, 50, 0, 0, 179, 190, 1, 0, 0, 0, 180, 181, 3, 16, 8, 0, 181, 183, 5, 40, 0, 0, 182, 184, 5, 44, 0, 0, 183, 182, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 5, 45, 0, 0, 186, 190, 1, 0, 0, 0, 187, 188, 4, 5, 4, 0, 188, 190, 3, 14, 7, 0, 189, 159, 1, 0, 0, 0, 189, 162, 1, 0, 0, 0, 189, 163, 1, 0, 0, 0, 189, 164, 1, 0, 0, 0, 189, 180, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 190, 199, 1, 0, 0, 0, 191, 192, 10, 5, 0, 0, 192, 193, 5, 30, 0, 0, 193, 198, 3, 10, 5, 6, 194, 195, 10, 4, 0, 0, 195, 196, 5, 47, 0, 0, 196, 198, 3, 10, 5, 5, 197, 191, 1, 0, 0, 0, 197, 194, 1, 0, 0, 0, 198, 201, 1, 0, 0, 0, 199, 197, 1, 0, 0, 0, 199, 200, 1, 0, 0, 0, 200, 11, 1, 0, 0, 0, 201, 199, 1, 0, 0, 0, 202, 204, 3, 16, 8, 0, 203, 205, 5, 44, 0, 0, 204, 203, 1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 1, 0, 0, 0, 206, 207, 5, 42, 0, 0, 207, 208, 3, 98, 49, 0, 208, 217, 1, 0, 0, 0, 209, 211, 3, 16, 8, 0, 210, 212, 5, 44, 0, 0, 211, 210, 1, 0, 0, 0, 211, 212, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 5, 49, 0, 0, 214, 215, 3, 98, 49, 0, 215, 217, 1, 0, 0, 0, 216, 202, 1, 0, 0, 0, 216, 209, 1, 0, 0, 
0, 217, 13, 1, 0, 0, 0, 218, 219, 3, 16, 8, 0, 219, 220, 5, 19, 0, 0, 220, 221, 3, 98, 49, 0, 221, 15, 1, 0, 0, 0, 222, 228, 3, 18, 9, 0, 223, 224, 3, 18, 9, 0, 224, 225, 3, 100, 50, 0, 225, 226, 3, 18, 9, 0, 226, 228, 1, 0, 0, 0, 227, 222, 1, 0, 0, 0, 227, 223, 1, 0, 0, 0, 228, 17, 1, 0, 0, 0, 229, 230, 6, 9, -1, 0, 230, 234, 3, 20, 10, 0, 231, 232, 7, 0, 0, 0, 232, 234, 3, 18, 9, 3, 233, 229, 1, 0, 0, 0, 233, 231, 1, 0, 0, 0, 234, 243, 1, 0, 0, 0, 235, 236, 10, 2, 0, 0, 236, 237, 7, 1, 0, 0, 237, 242, 3, 18, 9, 3, 238, 239, 10, 1, 0, 0, 239, 240, 7, 0, 0, 0, 240, 242, 3, 18, 9, 2, 241, 235, 1, 0, 0, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 6, 10, -1, 0, 247, 255, 3, 62, 31, 0, 248, 255, 3, 52, 26, 0, 249, 255, 3, 22, 11, 0, 250, 251, 5, 43, 0, 0, 251, 252, 3, 10, 5, 0, 252, 253, 5, 50, 0, 0, 253, 255, 1, 0, 0, 0, 254, 246, 1, 0, 0, 0, 254, 248, 1, 0, 0, 0, 254, 249, 1, 0, 0, 0, 254, 250, 1, 0, 0, 0, 255, 261, 1, 0, 0, 0, 256, 257, 10, 1, 0, 0, 257, 258, 5, 33, 0, 0, 258, 260, 3, 24, 12, 0, 259, 256, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 21, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 265, 3, 58, 29, 0, 265, 275, 5, 43, 0, 0, 266, 276, 5, 61, 0, 0, 267, 272, 3, 10, 5, 0, 268, 269, 5, 34, 0, 0, 269, 271, 3, 10, 5, 0, 270, 268, 1, 0, 0, 0, 271, 274, 1, 0, 0, 0, 272, 270, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 275, 266, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 276, 1, 0, 0, 0, 276, 277, 1, 0, 0, 0, 277, 278, 5, 50, 0, 0, 278, 23, 1, 0, 0, 0, 279, 280, 3, 58, 29, 0, 280, 25, 1, 0, 0, 0, 281, 282, 5, 12, 0, 0, 282, 283, 3, 28, 14, 0, 283, 27, 1, 0, 0, 0, 284, 289, 3, 30, 15, 0, 285, 286, 5, 34, 0, 0, 286, 288, 3, 30, 15, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 29, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 298, 3, 10, 5, 0, 293, 294, 3, 52, 
26, 0, 294, 295, 5, 32, 0, 0, 295, 296, 3, 10, 5, 0, 296, 298, 1, 0, 0, 0, 297, 292, 1, 0, 0, 0, 297, 293, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 300, 5, 6, 0, 0, 300, 305, 3, 34, 17, 0, 301, 302, 5, 34, 0, 0, 302, 304, 3, 34, 17, 0, 303, 301, 1, 0, 0, 0, 304, 307, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 308, 310, 3, 40, 20, 0, 309, 308, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 33, 1, 0, 0, 0, 311, 312, 3, 36, 18, 0, 312, 313, 5, 104, 0, 0, 313, 314, 3, 38, 19, 0, 314, 317, 1, 0, 0, 0, 315, 317, 3, 38, 19, 0, 316, 311, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 35, 1, 0, 0, 0, 318, 319, 5, 76, 0, 0, 319, 37, 1, 0, 0, 0, 320, 321, 7, 2, 0, 0, 321, 39, 1, 0, 0, 0, 322, 325, 3, 42, 21, 0, 323, 325, 3, 44, 22, 0, 324, 322, 1, 0, 0, 0, 324, 323, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 327, 5, 75, 0, 0, 327, 332, 5, 76, 0, 0, 328, 329, 5, 34, 0, 0, 329, 331, 5, 76, 0, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 43, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 336, 5, 65, 0, 0, 336, 337, 3, 42, 21, 0, 337, 338, 5, 66, 0, 0, 338, 45, 1, 0, 0, 0, 339, 340, 5, 20, 0, 0, 340, 345, 3, 34, 17, 0, 341, 342, 5, 34, 0, 0, 342, 344, 3, 34, 17, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 350, 3, 28, 14, 0, 349, 348, 1, 0, 0, 0, 349, 350, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 352, 5, 29, 0, 0, 352, 354, 3, 28, 14, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 47, 1, 0, 0, 0, 355, 356, 5, 4, 0, 0, 356, 357, 3, 28, 14, 0, 357, 49, 1, 0, 0, 0, 358, 360, 5, 15, 0, 0, 359, 361, 3, 28, 14, 0, 360, 359, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 364, 1, 0, 0, 0, 362, 363, 5, 29, 0, 0, 363, 365, 3, 28, 14, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 51, 1, 0, 0, 0, 366, 371, 3, 58, 29, 0, 367, 368, 5, 36, 0, 0, 368, 370, 3, 58, 29, 0, 369, 367, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 
0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 53, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 379, 3, 60, 30, 0, 375, 376, 5, 36, 0, 0, 376, 378, 3, 60, 30, 0, 377, 375, 1, 0, 0, 0, 378, 381, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 55, 1, 0, 0, 0, 381, 379, 1, 0, 0, 0, 382, 387, 3, 54, 27, 0, 383, 384, 5, 34, 0, 0, 384, 386, 3, 54, 27, 0, 385, 383, 1, 0, 0, 0, 386, 389, 1, 0, 0, 0, 387, 385, 1, 0, 0, 0, 387, 388, 1, 0, 0, 0, 388, 57, 1, 0, 0, 0, 389, 387, 1, 0, 0, 0, 390, 391, 7, 3, 0, 0, 391, 59, 1, 0, 0, 0, 392, 393, 5, 80, 0, 0, 393, 61, 1, 0, 0, 0, 394, 437, 5, 45, 0, 0, 395, 396, 3, 96, 48, 0, 396, 397, 5, 67, 0, 0, 397, 437, 1, 0, 0, 0, 398, 437, 3, 94, 47, 0, 399, 437, 3, 96, 48, 0, 400, 437, 3, 90, 45, 0, 401, 437, 3, 64, 32, 0, 402, 437, 3, 98, 49, 0, 403, 404, 5, 65, 0, 0, 404, 409, 3, 92, 46, 0, 405, 406, 5, 34, 0, 0, 406, 408, 3, 92, 46, 0, 407, 405, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 412, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 412, 413, 5, 66, 0, 0, 413, 437, 1, 0, 0, 0, 414, 415, 5, 65, 0, 0, 415, 420, 3, 90, 45, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 90, 45, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 423, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 66, 0, 0, 424, 437, 1, 0, 0, 0, 425, 426, 5, 65, 0, 0, 426, 431, 3, 98, 49, 0, 427, 428, 5, 34, 0, 0, 428, 430, 3, 98, 49, 0, 429, 427, 1, 0, 0, 0, 430, 433, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 434, 1, 0, 0, 0, 433, 431, 1, 0, 0, 0, 434, 435, 5, 66, 0, 0, 435, 437, 1, 0, 0, 0, 436, 394, 1, 0, 0, 0, 436, 395, 1, 0, 0, 0, 436, 398, 1, 0, 0, 0, 436, 399, 1, 0, 0, 0, 436, 400, 1, 0, 0, 0, 436, 401, 1, 0, 0, 0, 436, 402, 1, 0, 0, 0, 436, 403, 1, 0, 0, 0, 436, 414, 1, 0, 0, 0, 436, 425, 1, 0, 0, 0, 437, 63, 1, 0, 0, 0, 438, 441, 5, 48, 0, 0, 439, 441, 5, 64, 0, 0, 440, 438, 1, 0, 0, 0, 440, 439, 1, 0, 0, 0, 441, 65, 1, 0, 0, 0, 442, 443, 5, 9, 0, 0, 443, 444, 5, 27, 0, 0, 444, 67, 1, 
0, 0, 0, 445, 446, 5, 14, 0, 0, 446, 451, 3, 70, 35, 0, 447, 448, 5, 34, 0, 0, 448, 450, 3, 70, 35, 0, 449, 447, 1, 0, 0, 0, 450, 453, 1, 0, 0, 0, 451, 449, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 69, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 454, 456, 3, 10, 5, 0, 455, 457, 7, 4, 0, 0, 456, 455, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 460, 1, 0, 0, 0, 458, 459, 5, 46, 0, 0, 459, 461, 7, 5, 0, 0, 460, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 71, 1, 0, 0, 0, 462, 463, 5, 8, 0, 0, 463, 464, 3, 56, 28, 0, 464, 73, 1, 0, 0, 0, 465, 466, 5, 2, 0, 0, 466, 467, 3, 56, 28, 0, 467, 75, 1, 0, 0, 0, 468, 469, 5, 11, 0, 0, 469, 474, 3, 78, 39, 0, 470, 471, 5, 34, 0, 0, 471, 473, 3, 78, 39, 0, 472, 470, 1, 0, 0, 0, 473, 476, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 77, 1, 0, 0, 0, 476, 474, 1, 0, 0, 0, 477, 478, 3, 54, 27, 0, 478, 479, 5, 84, 0, 0, 479, 480, 3, 54, 27, 0, 480, 79, 1, 0, 0, 0, 481, 482, 5, 1, 0, 0, 482, 483, 3, 20, 10, 0, 483, 485, 3, 98, 49, 0, 484, 486, 3, 86, 43, 0, 485, 484, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 81, 1, 0, 0, 0, 487, 488, 5, 7, 0, 0, 488, 489, 3, 20, 10, 0, 489, 490, 3, 98, 49, 0, 490, 83, 1, 0, 0, 0, 491, 492, 5, 10, 0, 0, 492, 493, 3, 52, 26, 0, 493, 85, 1, 0, 0, 0, 494, 499, 3, 88, 44, 0, 495, 496, 5, 34, 0, 0, 496, 498, 3, 88, 44, 0, 497, 495, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 87, 1, 0, 0, 0, 501, 499, 1, 0, 0, 0, 502, 503, 3, 58, 29, 0, 503, 504, 5, 32, 0, 0, 504, 505, 3, 62, 31, 0, 505, 89, 1, 0, 0, 0, 506, 507, 7, 6, 0, 0, 507, 91, 1, 0, 0, 0, 508, 511, 3, 94, 47, 0, 509, 511, 3, 96, 48, 0, 510, 508, 1, 0, 0, 0, 510, 509, 1, 0, 0, 0, 511, 93, 1, 0, 0, 0, 512, 514, 7, 0, 0, 0, 513, 512, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 5, 28, 0, 0, 516, 95, 1, 0, 0, 0, 517, 519, 7, 0, 0, 0, 518, 517, 1, 0, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 5, 27, 0, 0, 521, 97, 1, 0, 0, 0, 522, 523, 5, 26, 0, 0, 523, 99, 1, 0, 0, 0, 524, 525, 7, 7, 
0, 0, 525, 101, 1, 0, 0, 0, 526, 527, 5, 5, 0, 0, 527, 528, 3, 104, 52, 0, 528, 103, 1, 0, 0, 0, 529, 530, 5, 65, 0, 0, 530, 531, 3, 2, 1, 0, 531, 532, 5, 66, 0, 0, 532, 105, 1, 0, 0, 0, 533, 534, 5, 13, 0, 0, 534, 535, 5, 100, 0, 0, 535, 107, 1, 0, 0, 0, 536, 537, 5, 3, 0, 0, 537, 540, 5, 90, 0, 0, 538, 539, 5, 88, 0, 0, 539, 541, 3, 54, 27, 0, 540, 538, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 551, 1, 0, 0, 0, 542, 543, 5, 89, 0, 0, 543, 548, 3, 110, 55, 0, 544, 545, 5, 34, 0, 0, 545, 547, 3, 110, 55, 0, 546, 544, 1, 0, 0, 0, 547, 550, 1, 0, 0, 0, 548, 546, 1, 0, 0, 0, 548, 549, 1, 0, 0, 0, 549, 552, 1, 0, 0, 0, 550, 548, 1, 0, 0, 0, 551, 542, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 109, 1, 0, 0, 0, 553, 554, 3, 54, 27, 0, 554, 555, 5, 32, 0, 0, 555, 557, 1, 0, 0, 0, 556, 553, 1, 0, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 3, 54, 27, 0, 559, 111, 1, 0, 0, 0, 560, 561, 5, 18, 0, 0, 561, 562, 3, 34, 17, 0, 562, 563, 5, 88, 0, 0, 563, 564, 3, 56, 28, 0, 564, 113, 1, 0, 0, 0, 565, 566, 5, 17, 0, 0, 566, 569, 3, 28, 14, 0, 567, 568, 5, 29, 0, 0, 568, 570, 3, 28, 14, 0, 569, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 115, 1, 0, 0, 0, 54, 127, 136, 154, 166, 175, 183, 189, 197, 199, 204, 211, 216, 227, 233, 241, 243, 254, 261, 272, 275, 289, 297, 305, 309, 316, 324, 332, 345, 349, 353, 360, 364, 371, 379, 387, 409, 420, 431, 436, 440, 451, 456, 460, 474, 485, 499, 510, 513, 518, 540, 548, 551, 556, 569] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 578da6fe786a..cf1c5fa691d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -26,31 +26,30 @@ public class EsqlBaseParser extends ParserConfig { new PredictionContextCache(); public 
static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, KEEP=8, - LIMIT=9, META=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, - WHERE=17, DEV_INLINESTATS=18, DEV_LOOKUP=19, DEV_MATCH=20, DEV_METRICS=21, - UNKNOWN_CMD=22, LINE_COMMENT=23, MULTILINE_COMMENT=24, WS=25, PIPE=26, - QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, - ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, FIRST=39, - IN=40, IS=41, LAST=42, LIKE=43, LP=44, NOT=45, NULL=46, NULLS=47, OR=48, - PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, LTE=57, - GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, NAMED_OR_POSITIONAL_PARAM=65, - OPENING_BRACKET=66, CLOSING_BRACKET=67, UNQUOTED_IDENTIFIER=68, QUOTED_IDENTIFIER=69, - EXPR_LINE_COMMENT=70, EXPR_MULTILINE_COMMENT=71, EXPR_WS=72, EXPLAIN_WS=73, - EXPLAIN_LINE_COMMENT=74, EXPLAIN_MULTILINE_COMMENT=75, METADATA=76, UNQUOTED_SOURCE=77, - FROM_LINE_COMMENT=78, FROM_MULTILINE_COMMENT=79, FROM_WS=80, ID_PATTERN=81, - PROJECT_LINE_COMMENT=82, PROJECT_MULTILINE_COMMENT=83, PROJECT_WS=84, - AS=85, RENAME_LINE_COMMENT=86, RENAME_MULTILINE_COMMENT=87, RENAME_WS=88, - ON=89, WITH=90, ENRICH_POLICY_NAME=91, ENRICH_LINE_COMMENT=92, ENRICH_MULTILINE_COMMENT=93, - ENRICH_WS=94, ENRICH_FIELD_LINE_COMMENT=95, ENRICH_FIELD_MULTILINE_COMMENT=96, - ENRICH_FIELD_WS=97, MVEXPAND_LINE_COMMENT=98, MVEXPAND_MULTILINE_COMMENT=99, - MVEXPAND_WS=100, INFO=101, SHOW_LINE_COMMENT=102, SHOW_MULTILINE_COMMENT=103, - SHOW_WS=104, FUNCTIONS=105, META_LINE_COMMENT=106, META_MULTILINE_COMMENT=107, - META_WS=108, COLON=109, SETTING=110, SETTING_LINE_COMMENT=111, SETTTING_MULTILINE_COMMENT=112, - SETTING_WS=113, LOOKUP_LINE_COMMENT=114, LOOKUP_MULTILINE_COMMENT=115, - LOOKUP_WS=116, LOOKUP_FIELD_LINE_COMMENT=117, LOOKUP_FIELD_MULTILINE_COMMENT=118, - LOOKUP_FIELD_WS=119, METRICS_LINE_COMMENT=120, METRICS_MULTILINE_COMMENT=121, - METRICS_WS=122, 
CLOSING_METRICS_LINE_COMMENT=123, CLOSING_METRICS_MULTILINE_COMMENT=124, - CLOSING_METRICS_WS=125; + LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, + WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_MATCH=19, DEV_METRICS=20, + UNKNOWN_CMD=21, LINE_COMMENT=22, MULTILINE_COMMENT=23, WS=24, PIPE=25, + QUOTED_STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, + ASC=31, ASSIGN=32, CAST_OP=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, + IN=39, IS=40, LAST=41, LIKE=42, LP=43, NOT=44, NULL=45, NULLS=46, OR=47, + PARAM=48, RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, + GT=57, GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, NAMED_OR_POSITIONAL_PARAM=64, + OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, + EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, EXPLAIN_WS=72, + EXPLAIN_LINE_COMMENT=73, EXPLAIN_MULTILINE_COMMENT=74, METADATA=75, UNQUOTED_SOURCE=76, + FROM_LINE_COMMENT=77, FROM_MULTILINE_COMMENT=78, FROM_WS=79, ID_PATTERN=80, + PROJECT_LINE_COMMENT=81, PROJECT_MULTILINE_COMMENT=82, PROJECT_WS=83, + AS=84, RENAME_LINE_COMMENT=85, RENAME_MULTILINE_COMMENT=86, RENAME_WS=87, + ON=88, WITH=89, ENRICH_POLICY_NAME=90, ENRICH_LINE_COMMENT=91, ENRICH_MULTILINE_COMMENT=92, + ENRICH_WS=93, ENRICH_FIELD_LINE_COMMENT=94, ENRICH_FIELD_MULTILINE_COMMENT=95, + ENRICH_FIELD_WS=96, MVEXPAND_LINE_COMMENT=97, MVEXPAND_MULTILINE_COMMENT=98, + MVEXPAND_WS=99, INFO=100, SHOW_LINE_COMMENT=101, SHOW_MULTILINE_COMMENT=102, + SHOW_WS=103, COLON=104, SETTING=105, SETTING_LINE_COMMENT=106, SETTTING_MULTILINE_COMMENT=107, + SETTING_WS=108, LOOKUP_LINE_COMMENT=109, LOOKUP_MULTILINE_COMMENT=110, + LOOKUP_WS=111, LOOKUP_FIELD_LINE_COMMENT=112, LOOKUP_FIELD_MULTILINE_COMMENT=113, + LOOKUP_FIELD_WS=114, METRICS_LINE_COMMENT=115, METRICS_MULTILINE_COMMENT=116, + METRICS_WS=117, CLOSING_METRICS_LINE_COMMENT=118, CLOSING_METRICS_MULTILINE_COMMENT=119, + CLOSING_METRICS_WS=120; 
public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -69,8 +68,8 @@ public class EsqlBaseParser extends ParserConfig { RULE_booleanValue = 45, RULE_numericValue = 46, RULE_decimalValue = 47, RULE_integerValue = 48, RULE_string = 49, RULE_comparisonOperator = 50, RULE_explainCommand = 51, RULE_subqueryExpression = 52, RULE_showCommand = 53, - RULE_metaCommand = 54, RULE_enrichCommand = 55, RULE_enrichWithClause = 56, - RULE_lookupCommand = 57, RULE_inlinestatsCommand = 58; + RULE_enrichCommand = 54, RULE_enrichWithClause = 55, RULE_lookupCommand = 56, + RULE_inlinestatsCommand = 57; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", @@ -84,8 +83,8 @@ public class EsqlBaseParser extends ParserConfig { "dropCommand", "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", "string", "comparisonOperator", - "explainCommand", "subqueryExpression", "showCommand", "metaCommand", - "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand" + "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", + "enrichWithClause", "lookupCommand", "inlinestatsCommand" }; } public static final String[] ruleNames = makeRuleNames(); @@ -93,25 +92,25 @@ public class EsqlBaseParser extends ParserConfig { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'keep'", "'limit'", "'meta'", "'mv_expand'", "'rename'", "'row'", - "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, - null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'", - "'='", "'::'", "','", 
"'desc'", "'.'", "'false'", "'first'", "'in'", - "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", - "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", - "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", - null, null, null, null, null, null, null, null, "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", - "'with'", null, null, null, null, null, null, null, null, null, null, - "'info'", null, null, null, "'functions'", null, null, null, "':'" + "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", + "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, + null, "'|'", null, null, null, "'by'", "'and'", "'asc'", "'='", "'::'", + "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", + "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", + "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", null, null, "']'", null, null, null, + null, null, null, null, null, "'metadata'", null, null, null, null, null, + null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, + null, null, null, null, null, null, null, null, "'info'", null, null, + null, "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", - "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", + "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", + "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_MATCH", "DEV_METRICS", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", 
"COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", @@ -127,8 +126,7 @@ public class EsqlBaseParser extends ParserConfig { "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", - "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", @@ -220,9 +218,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(118); + setState(116); query(0); - setState(119); + setState(117); match(EOF); } } @@ -318,11 +316,11 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(122); + setState(120); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(129); + setState(127); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -333,16 +331,16 @@ public class EsqlBaseParser extends ParserConfig { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(124); + setState(122); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(125); + setState(123); match(PIPE); - setState(126); + setState(124); processingCommand(); } } } - setState(131); + setState(129); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ 
-367,9 +365,6 @@ public class EsqlBaseParser extends ParserConfig { public FromCommandContext fromCommand() { return getRuleContext(FromCommandContext.class,0); } - public MetaCommandContext metaCommand() { - return getRuleContext(MetaCommandContext.class,0); - } public RowCommandContext rowCommand() { return getRuleContext(RowCommandContext.class,0); } @@ -403,50 +398,43 @@ public class EsqlBaseParser extends ParserConfig { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(139); + setState(136); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(132); + setState(130); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(133); + setState(131); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(134); - metaCommand(); + setState(132); + rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(135); - rowCommand(); + setState(133); + showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(136); - showCommand(); - } - break; - case 6: - enterOuterAlt(_localctx, 6); - { - setState(137); + setState(134); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(138); + setState(135); metricsCommand(); } break; @@ -531,108 +519,108 @@ public class EsqlBaseParser extends ParserConfig { ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(157); + setState(154); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(141); + setState(138); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(142); + setState(139); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - 
setState(143); + setState(140); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(144); + setState(141); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(145); + setState(142); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(146); + setState(143); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(147); + setState(144); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(148); + setState(145); renameCommand(); } break; case 9: enterOuterAlt(_localctx, 9); { - setState(149); + setState(146); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(150); + setState(147); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(151); + setState(148); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(152); + setState(149); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(153); + setState(150); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(154); + setState(151); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(155); + setState(152); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(156); + setState(153); lookupCommand(); } break; @@ -681,9 +669,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(159); + setState(156); match(WHERE); - setState(160); + setState(157); booleanExpression(0); } } @@ -899,7 +887,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(192); + setState(189); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -908,9 +896,9 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(163); + 
setState(160); match(NOT); - setState(164); + setState(161); booleanExpression(8); } break; @@ -919,7 +907,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(165); + setState(162); valueExpression(); } break; @@ -928,7 +916,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(166); + setState(163); regexBooleanExpression(); } break; @@ -937,41 +925,41 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(167); + setState(164); valueExpression(); - setState(169); + setState(166); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(168); + setState(165); match(NOT); } } - setState(171); + setState(168); match(IN); - setState(172); + setState(169); match(LP); - setState(173); + setState(170); valueExpression(); - setState(178); + setState(175); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(174); + setState(171); match(COMMA); - setState(175); + setState(172); valueExpression(); } } - setState(180); + setState(177); _errHandler.sync(this); _la = _input.LA(1); } - setState(181); + setState(178); match(RP); } break; @@ -980,21 +968,21 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(183); + setState(180); valueExpression(); - setState(184); + setState(181); match(IS); - setState(186); + setState(183); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(185); + setState(182); match(NOT); } } - setState(188); + setState(185); match(NULL); } break; @@ -1003,15 +991,15 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = 
_localctx; - setState(190); + setState(187); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(191); + setState(188); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(202); + setState(199); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1019,7 +1007,7 @@ public class EsqlBaseParser extends ParserConfig { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(200); + setState(197); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1027,11 +1015,11 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(194); + setState(191); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(195); + setState(192); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(196); + setState(193); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1040,18 +1028,18 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(197); + setState(194); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(198); + setState(195); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(199); + setState(196); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(204); + setState(201); _errHandler.sync(this); _alt 
= getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1106,48 +1094,48 @@ public class EsqlBaseParser extends ParserConfig { enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(219); + setState(216); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(205); + setState(202); valueExpression(); - setState(207); + setState(204); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(206); + setState(203); match(NOT); } } - setState(209); + setState(206); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(210); + setState(207); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(212); + setState(209); valueExpression(); - setState(214); + setState(211); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(213); + setState(210); match(NOT); } } - setState(216); + setState(213); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(217); + setState(214); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1200,11 +1188,11 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(221); + setState(218); valueExpression(); - setState(222); + setState(219); match(DEV_MATCH); - setState(223); + setState(220); ((MatchBooleanExpressionContext)_localctx).queryString = string(); } } @@ -1288,14 +1276,14 @@ public class EsqlBaseParser extends ParserConfig { ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(230); + setState(227); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(225); + setState(222); 
operatorExpression(0); } break; @@ -1303,11 +1291,11 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(226); + setState(223); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(227); + setState(224); comparisonOperator(); - setState(228); + setState(225); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1432,7 +1420,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(236); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1441,7 +1429,7 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(233); + setState(230); primaryExpression(0); } break; @@ -1450,7 +1438,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(234); + setState(231); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1461,13 +1449,13 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(235); + setState(232); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(246); + setState(243); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1475,7 +1463,7 @@ public class EsqlBaseParser extends ParserConfig { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(244); + setState(241); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1483,12 +1471,12 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ArithmeticBinaryContext(new 
OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(238); + setState(235); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(239); + setState(236); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1496,7 +1484,7 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(240); + setState(237); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1505,9 +1493,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(241); + setState(238); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(242); + setState(239); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1518,14 +1506,14 @@ public class EsqlBaseParser extends ParserConfig { _errHandler.reportMatch(this); consume(); } - setState(243); + setState(240); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(248); + setState(245); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1683,7 +1671,7 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(257); + setState(254); 
_errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1692,7 +1680,7 @@ public class EsqlBaseParser extends ParserConfig { _ctx = _localctx; _prevctx = _localctx; - setState(250); + setState(247); constant(); } break; @@ -1701,7 +1689,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(251); + setState(248); qualifiedName(); } break; @@ -1710,7 +1698,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(252); + setState(249); functionExpression(); } break; @@ -1719,17 +1707,17 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(253); + setState(250); match(LP); - setState(254); + setState(251); booleanExpression(0); - setState(255); + setState(252); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(264); + setState(261); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1740,16 +1728,16 @@ public class EsqlBaseParser extends ParserConfig { { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(259); + setState(256); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(260); + setState(257); match(CAST_OP); - setState(261); + setState(258); dataType(); } } } - setState(266); + setState(263); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1811,37 +1799,37 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(267); + setState(264); identifier(); - 
setState(268); + setState(265); match(LP); - setState(278); + setState(275); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(269); + setState(266); match(ASTERISK); } break; case 2: { { - setState(270); + setState(267); booleanExpression(0); - setState(275); + setState(272); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(271); + setState(268); match(COMMA); - setState(272); + setState(269); booleanExpression(0); } } - setState(277); + setState(274); _errHandler.sync(this); _la = _input.LA(1); } @@ -1849,7 +1837,7 @@ public class EsqlBaseParser extends ParserConfig { } break; } - setState(280); + setState(277); match(RP); } } @@ -1907,7 +1895,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(282); + setState(279); identifier(); } } @@ -1954,9 +1942,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(284); + setState(281); match(ROW); - setState(285); + setState(282); fields(); } } @@ -2010,23 +1998,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(287); + setState(284); field(); - setState(292); + setState(289); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(288); + setState(285); match(COMMA); - setState(289); + setState(286); field(); } } } - setState(294); + setState(291); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2076,24 +2064,24 @@ public class EsqlBaseParser extends ParserConfig { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 30, RULE_field); try { - setState(300); + setState(297); _errHandler.sync(this); switch ( 
getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(295); + setState(292); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(296); + setState(293); qualifiedName(); - setState(297); + setState(294); match(ASSIGN); - setState(298); + setState(295); booleanExpression(0); } break; @@ -2153,34 +2141,34 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(302); + setState(299); match(FROM); - setState(303); + setState(300); indexPattern(); - setState(308); + setState(305); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(304); + setState(301); match(COMMA); - setState(305); + setState(302); indexPattern(); } } } - setState(310); + setState(307); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(312); + setState(309); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(311); + setState(308); metadata(); } break; @@ -2231,24 +2219,24 @@ public class EsqlBaseParser extends ParserConfig { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); enterRule(_localctx, 34, RULE_indexPattern); try { - setState(319); + setState(316); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(314); + setState(311); clusterString(); - setState(315); + setState(312); match(COLON); - setState(316); + setState(313); indexString(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(318); + setState(315); indexString(); } break; @@ -2294,7 +2282,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(321); + setState(318); match(UNQUOTED_SOURCE); } } @@ -2340,7 +2328,7 
@@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(323); + setState(320); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2395,20 +2383,20 @@ public class EsqlBaseParser extends ParserConfig { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 40, RULE_metadata); try { - setState(327); + setState(324); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(325); + setState(322); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(326); + setState(323); deprecated_metadata(); } break; @@ -2465,25 +2453,25 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(329); + setState(326); match(METADATA); - setState(330); + setState(327); match(UNQUOTED_SOURCE); - setState(335); + setState(332); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(331); + setState(328); match(COMMA); - setState(332); + setState(329); match(UNQUOTED_SOURCE); } } } - setState(337); + setState(334); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2532,11 +2520,11 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(338); + setState(335); match(OPENING_BRACKET); - setState(339); + setState(336); metadataOption(); - setState(340); + setState(337); match(CLOSING_BRACKET); } } @@ -2600,46 +2588,46 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(342); + setState(339); match(DEV_METRICS); - setState(343); + setState(340); indexPattern(); - setState(348); + setState(345); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(344); + setState(341); match(COMMA); - setState(345); + setState(342); indexPattern(); } } } - setState(350); + setState(347); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(352); + setState(349); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(351); + setState(348); ((MetricsCommandContext)_localctx).aggregates = fields(); } break; } - setState(356); + setState(353); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(354); + setState(351); match(BY); - setState(355); + setState(352); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2689,9 +2677,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(358); + setState(355); match(EVAL); - setState(359); + setState(356); fields(); } } @@ -2744,26 +2732,26 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(358); match(STATS); - setState(363); + setState(360); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(362); + setState(359); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(367); + setState(364); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(365); + setState(362); match(BY); - setState(366); + setState(363); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2820,23 +2808,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(369); + setState(366); identifier(); - setState(374); + setState(371); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(370); + setState(367); match(DOT); - setState(371); + setState(368); identifier(); } } } - setState(376); + setState(373); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2892,23 +2880,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(377); + setState(374); identifierPattern(); - setState(382); + setState(379); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(375); match(DOT); - setState(379); + setState(376); identifierPattern(); } } } - setState(384); + setState(381); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,33,_ctx); } @@ -2964,23 +2952,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(385); + setState(382); qualifiedNamePattern(); - setState(390); + setState(387); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(386); + setState(383); match(COMMA); - setState(387); + setState(384); qualifiedNamePattern(); } } } - setState(392); + setState(389); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3028,7 +3016,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(393); + setState(390); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3080,7 +3068,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(395); + setState(392); 
match(ID_PATTERN); } } @@ -3351,14 +3339,14 @@ public class EsqlBaseParser extends ParserConfig { enterRule(_localctx, 62, RULE_constant); int _la; try { - setState(439); + setState(436); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(397); + setState(394); match(NULL); } break; @@ -3366,9 +3354,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(398); + setState(395); integerValue(); - setState(399); + setState(396); match(UNQUOTED_IDENTIFIER); } break; @@ -3376,7 +3364,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(401); + setState(398); decimalValue(); } break; @@ -3384,7 +3372,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(402); + setState(399); integerValue(); } break; @@ -3392,7 +3380,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(403); + setState(400); booleanValue(); } break; @@ -3400,7 +3388,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new InputParamsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(404); + setState(401); params(); } break; @@ -3408,7 +3396,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(405); + setState(402); string(); } break; @@ -3416,27 +3404,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(406); + setState(403); match(OPENING_BRACKET); - setState(407); + 
setState(404); numericValue(); - setState(412); + setState(409); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(408); + setState(405); match(COMMA); - setState(409); + setState(406); numericValue(); } } - setState(414); + setState(411); _errHandler.sync(this); _la = _input.LA(1); } - setState(415); + setState(412); match(CLOSING_BRACKET); } break; @@ -3444,27 +3432,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(417); + setState(414); match(OPENING_BRACKET); - setState(418); + setState(415); booleanValue(); - setState(423); + setState(420); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(419); + setState(416); match(COMMA); - setState(420); + setState(417); booleanValue(); } } - setState(425); + setState(422); _errHandler.sync(this); _la = _input.LA(1); } - setState(426); + setState(423); match(CLOSING_BRACKET); } break; @@ -3472,27 +3460,27 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(428); + setState(425); match(OPENING_BRACKET); - setState(429); + setState(426); string(); - setState(434); + setState(431); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(430); + setState(427); match(COMMA); - setState(431); + setState(428); string(); } } - setState(436); + setState(433); _errHandler.sync(this); _la = _input.LA(1); } - setState(437); + setState(434); match(CLOSING_BRACKET); } break; @@ -3566,14 +3554,14 @@ public class EsqlBaseParser extends ParserConfig { ParamsContext _localctx = new ParamsContext(_ctx, getState()); enterRule(_localctx, 64, RULE_params); try { - setState(443); + setState(440); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(441); + 
setState(438); match(PARAM); } break; @@ -3581,7 +3569,7 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(442); + setState(439); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3630,9 +3618,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(445); + setState(442); match(LIMIT); - setState(446); + setState(443); match(INTEGER_LITERAL); } } @@ -3687,25 +3675,25 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(448); + setState(445); match(SORT); - setState(449); + setState(446); orderExpression(); - setState(454); + setState(451); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(450); + setState(447); match(COMMA); - setState(451); + setState(448); orderExpression(); } } } - setState(456); + setState(453); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } @@ -3761,14 +3749,14 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(457); + setState(454); booleanExpression(0); - setState(459); + setState(456); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(458); + setState(455); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3782,14 +3770,14 @@ public class EsqlBaseParser extends ParserConfig { } break; } - setState(463); + setState(460); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(461); + setState(458); match(NULLS); - setState(462); + setState(459); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( 
!(_la==FIRST || _la==LAST) ) { @@ -3848,9 +3836,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(462); match(KEEP); - setState(466); + setState(463); qualifiedNamePatterns(); } } @@ -3897,9 +3885,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(468); + setState(465); match(DROP); - setState(469); + setState(466); qualifiedNamePatterns(); } } @@ -3954,25 +3942,25 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(471); + setState(468); match(RENAME); - setState(472); + setState(469); renameClause(); - setState(477); + setState(474); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(473); + setState(470); match(COMMA); - setState(474); + setState(471); renameClause(); } } } - setState(479); + setState(476); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } @@ -4026,11 +4014,11 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(480); + setState(477); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(481); + setState(478); match(AS); - setState(482); + setState(479); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4083,18 +4071,18 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(484); + setState(481); match(DISSECT); - setState(485); + setState(482); primaryExpression(0); - setState(486); + setState(483); string(); - setState(488); + setState(485); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: { - setState(487); + setState(484); commandOptions(); } break; @@ -4147,11 +4135,11 @@ public class EsqlBaseParser extends 
ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(490); + setState(487); match(GROK); - setState(491); + setState(488); primaryExpression(0); - setState(492); + setState(489); string(); } } @@ -4198,9 +4186,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(494); + setState(491); match(MV_EXPAND); - setState(495); + setState(492); qualifiedName(); } } @@ -4254,23 +4242,23 @@ public class EsqlBaseParser extends ParserConfig { int _alt; enterOuterAlt(_localctx, 1); { - setState(497); + setState(494); commandOption(); - setState(502); + setState(499); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(498); + setState(495); match(COMMA); - setState(499); + setState(496); commandOption(); } } } - setState(504); + setState(501); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } @@ -4322,11 +4310,11 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(502); identifier(); - setState(506); + setState(503); match(ASSIGN); - setState(507); + setState(504); constant(); } } @@ -4372,7 +4360,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(506); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4427,20 +4415,20 @@ public class EsqlBaseParser extends ParserConfig { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 92, RULE_numericValue); try { - setState(513); + setState(510); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(511); + setState(508); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(512); + 
setState(509); integerValue(); } break; @@ -4489,12 +4477,12 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(516); + setState(513); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(515); + setState(512); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4507,7 +4495,7 @@ public class EsqlBaseParser extends ParserConfig { } } - setState(518); + setState(515); match(DECIMAL_LITERAL); } } @@ -4554,12 +4542,12 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(521); + setState(518); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(520); + setState(517); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4572,7 +4560,7 @@ public class EsqlBaseParser extends ParserConfig { } } - setState(523); + setState(520); match(INTEGER_LITERAL); } } @@ -4616,7 +4604,7 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(525); + setState(522); match(QUOTED_STRING); } } @@ -4666,9 +4654,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(527); + setState(524); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 562949953421312000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4721,9 +4709,9 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(529); + setState(526); match(EXPLAIN); - setState(530); + setState(527); subqueryExpression(); } } @@ -4771,11 +4759,11 @@ public class EsqlBaseParser extends ParserConfig { try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(529); match(OPENING_BRACKET); - setState(533); + setState(530); query(0); - setState(534); 
+ setState(531); match(CLOSING_BRACKET); } } @@ -4832,9 +4820,9 @@ public class EsqlBaseParser extends ParserConfig { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(536); + setState(533); match(SHOW); - setState(537); + setState(534); match(INFO); } } @@ -4849,65 +4837,6 @@ public class EsqlBaseParser extends ParserConfig { return _localctx; } - @SuppressWarnings("CheckReturnValue") - public static class MetaCommandContext extends ParserRuleContext { - @SuppressWarnings("this-escape") - public MetaCommandContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_metaCommand; } - - @SuppressWarnings("this-escape") - public MetaCommandContext() { } - public void copyFrom(MetaCommandContext ctx) { - super.copyFrom(ctx); - } - } - @SuppressWarnings("CheckReturnValue") - public static class MetaFunctionsContext extends MetaCommandContext { - public TerminalNode META() { return getToken(EsqlBaseParser.META, 0); } - public TerminalNode FUNCTIONS() { return getToken(EsqlBaseParser.FUNCTIONS, 0); } - @SuppressWarnings("this-escape") - public MetaFunctionsContext(MetaCommandContext ctx) { copyFrom(ctx); } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetaFunctions(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetaFunctions(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetaFunctions(this); - else return visitor.visitChildren(this); - } - } - - public final MetaCommandContext metaCommand() throws RecognitionException { - MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - 
enterRule(_localctx, 108, RULE_metaCommand); - try { - _localctx = new MetaFunctionsContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(539); - match(META); - setState(540); - match(FUNCTIONS); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - @SuppressWarnings("CheckReturnValue") public static class EnrichCommandContext extends ParserRuleContext { public Token policyName; @@ -4951,51 +4880,51 @@ public class EsqlBaseParser extends ParserConfig { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_enrichCommand); + enterRule(_localctx, 108, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(542); + setState(536); match(ENRICH); - setState(543); + setState(537); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(546); + setState(540); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(544); + setState(538); match(ON); - setState(545); + setState(539); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(557); + setState(551); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(548); + setState(542); match(WITH); - setState(549); + setState(543); enrichWithClause(); - setState(554); + setState(548); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,50,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(550); + setState(544); match(COMMA); - setState(551); + setState(545); enrichWithClause(); } } } - setState(556); + setState(550); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,50,_ctx); } @@ -5048,23 +4977,23 @@ public class EsqlBaseParser extends ParserConfig { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_enrichWithClause); + enterRule(_localctx, 110, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(562); + setState(556); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: { - setState(559); + setState(553); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(560); + setState(554); match(ASSIGN); } break; } - setState(564); + setState(558); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5113,17 +5042,17 @@ public class EsqlBaseParser extends ParserConfig { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_lookupCommand); + enterRule(_localctx, 112, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(566); + setState(560); match(DEV_LOOKUP); - setState(567); + setState(561); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(568); + setState(562); match(ON); - setState(569); + setState(563); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5172,22 +5101,22 @@ public class EsqlBaseParser extends ParserConfig { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_inlinestatsCommand); + enterRule(_localctx, 114, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(571); + setState(565); match(DEV_INLINESTATS); - setState(572); + 
setState(566); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(575); + setState(569); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(573); + setState(567); match(BY); - setState(574); + setState(568); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5274,7 +5203,7 @@ public class EsqlBaseParser extends ParserConfig { } public static final String _serializedATN = - "\u0004\u0001}\u0242\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001x\u023c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -5289,359 +5218,355 @@ public class EsqlBaseParser extends ParserConfig { "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001\u0080\b\u0001\n\u0001\f\u0001\u0083\t\u0001\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0003\u0002\u008c\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "7\u00077\u00028\u00078\u00029\u00079\u0001\u0000\u0001\u0000\u0001\u0000"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0005\u0001~\b\u0001\n\u0001\f\u0001\u0081\t\u0001\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0089"+ + "\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ 
"\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0003\u0003\u009e\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0003\u0005\u00aa\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0005\u0005\u00b1\b\u0005\n\u0005\f\u0005\u00b4\t\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - "\u00bb\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ - "\u00c1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0005\u0005\u00c9\b\u0005\n\u0005\f\u0005\u00cc\t\u0005\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00d0\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00d7\b\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0003\u0006\u00dc\b\u0006\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00e7"+ - "\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f5\b\t\n\t\f\t\u00f8\t\t\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0102"+ - "\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0107\b\n\n\n\f\n\u010a\t\n\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005"+ - "\u000b\u0112\b\u000b\n\u000b\f\u000b\u0115\t\u000b\u0003\u000b\u0117\b"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e\u0123\b\u000e\n\u000e"+ - "\f\u000e\u0126\t\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0003\u000f\u012d\b\u000f\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0005\u0010\u0133\b\u0010\n\u0010\f\u0010\u0136\t\u0010\u0001"+ - 
"\u0010\u0003\u0010\u0139\b\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0003\u0011\u0140\b\u0011\u0001\u0012\u0001\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0003\u0014\u0148\b\u0014\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u014e\b\u0015\n"+ - "\u0015\f\u0015\u0151\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u015b"+ - "\b\u0017\n\u0017\f\u0017\u015e\t\u0017\u0001\u0017\u0003\u0017\u0161\b"+ - "\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0165\b\u0017\u0001\u0018\u0001"+ - "\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0003\u0019\u016c\b\u0019\u0001"+ - "\u0019\u0001\u0019\u0003\u0019\u0170\b\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0005\u001a\u0175\b\u001a\n\u001a\f\u001a\u0178\t\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0005\u001b\u017d\b\u001b\n\u001b\f\u001b\u0180"+ - "\t\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0185\b\u001c"+ - "\n\u001c\f\u001c\u0188\t\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0005\u001f\u019b\b\u001f\n\u001f\f\u001f\u019e\t\u001f"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u009b"+ + "\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a7"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005"+ + "\u0005\u00ae\b\u0005\n\u0005\f\u0005\u00b1\t\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b8\b\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00be\b\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ + 
"\u00c6\b\u0005\n\u0005\f\u0005\u00c9\t\u0005\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00cd\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0003\u0006\u00d4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00d9\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00e4\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u00ea\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u00f2\b\t\n\t\f\t\u00f5\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u00ff\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u0104\b\n\n\n\f\n\u0107\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u010f\b\u000b\n\u000b"+ + "\f\u000b\u0112\t\u000b\u0003\u000b\u0114\b\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0005\u000e\u0120\b\u000e\n\u000e\f\u000e\u0123\t\u000e\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0003\u000f\u012a\b\u000f"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0005\u0010\u0130\b\u0010"+ + "\n\u0010\f\u0010\u0133\t\u0010\u0001\u0010\u0003\u0010\u0136\b\u0010\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u013d"+ + "\b\u0011\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ + "\u0014\u0003\u0014\u0145\b\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0005\u0015\u014b\b\u0015\n\u0015\f\u0015\u014e\t\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0005\u0017\u0158\b\u0017\n\u0017\f\u0017\u015b\t\u0017\u0001"+ + "\u0017\u0003\u0017\u015e\b\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0162"+ + "\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0003"+ + "\u0019\u0169\b\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u016d\b\u0019"+ + 
"\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u0172\b\u001a\n\u001a"+ + "\f\u001a\u0175\t\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ + "\u017a\b\u001b\n\u001b\f\u001b\u017d\t\u001b\u0001\u001c\u0001\u001c\u0001"+ + "\u001c\u0005\u001c\u0182\b\u001c\n\u001c\f\u001c\u0185\t\u001c\u0001\u001d"+ + "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f"+ "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ - "\u0005\u001f\u01a6\b\u001f\n\u001f\f\u001f\u01a9\t\u001f\u0001\u001f\u0001"+ - "\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b1"+ - "\b\u001f\n\u001f\f\u001f\u01b4\t\u001f\u0001\u001f\u0001\u001f\u0003\u001f"+ - "\u01b8\b\u001f\u0001 \u0001 \u0003 \u01bc\b \u0001!\u0001!\u0001!\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c5\b\"\n\"\f\"\u01c8\t\"\u0001#"+ - "\u0001#\u0003#\u01cc\b#\u0001#\u0001#\u0003#\u01d0\b#\u0001$\u0001$\u0001"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01dc\b&\n&"+ - "\f&\u01df\t&\u0001\'\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001"+ - "(\u0003(\u01e9\b(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001"+ - "+\u0001+\u0001+\u0005+\u01f5\b+\n+\f+\u01f8\t+\u0001,\u0001,\u0001,\u0001"+ - ",\u0001-\u0001-\u0001.\u0001.\u0003.\u0202\b.\u0001/\u0003/\u0205\b/\u0001"+ - "/\u0001/\u00010\u00030\u020a\b0\u00010\u00010\u00011\u00011\u00012\u0001"+ - "2\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00037\u0223\b7\u0001"+ - "7\u00017\u00017\u00017\u00057\u0229\b7\n7\f7\u022c\t7\u00037\u022e\b7"+ - "\u00018\u00018\u00018\u00038\u0233\b8\u00018\u00018\u00019\u00019\u0001"+ - "9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0240\b:\u0001:\u0000"+ - "\u0004\u0002\n\u0012\u0014;\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - 
"TVXZ\\^`bdfhjlnprt\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000\u001b"+ - "\u001bMM\u0001\u0000DE\u0002\u0000 $$\u0002\u0000\'\'**\u0002\u0000&"+ - "&44\u0002\u0000557;\u025b\u0000v\u0001\u0000\u0000\u0000\u0002y\u0001"+ - "\u0000\u0000\u0000\u0004\u008b\u0001\u0000\u0000\u0000\u0006\u009d\u0001"+ - "\u0000\u0000\u0000\b\u009f\u0001\u0000\u0000\u0000\n\u00c0\u0001\u0000"+ - "\u0000\u0000\f\u00db\u0001\u0000\u0000\u0000\u000e\u00dd\u0001\u0000\u0000"+ - "\u0000\u0010\u00e6\u0001\u0000\u0000\u0000\u0012\u00ec\u0001\u0000\u0000"+ - "\u0000\u0014\u0101\u0001\u0000\u0000\u0000\u0016\u010b\u0001\u0000\u0000"+ - "\u0000\u0018\u011a\u0001\u0000\u0000\u0000\u001a\u011c\u0001\u0000\u0000"+ - "\u0000\u001c\u011f\u0001\u0000\u0000\u0000\u001e\u012c\u0001\u0000\u0000"+ - "\u0000 \u012e\u0001\u0000\u0000\u0000\"\u013f\u0001\u0000\u0000\u0000"+ - "$\u0141\u0001\u0000\u0000\u0000&\u0143\u0001\u0000\u0000\u0000(\u0147"+ - "\u0001\u0000\u0000\u0000*\u0149\u0001\u0000\u0000\u0000,\u0152\u0001\u0000"+ - "\u0000\u0000.\u0156\u0001\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u0000"+ - "2\u0169\u0001\u0000\u0000\u00004\u0171\u0001\u0000\u0000\u00006\u0179"+ - "\u0001\u0000\u0000\u00008\u0181\u0001\u0000\u0000\u0000:\u0189\u0001\u0000"+ - "\u0000\u0000<\u018b\u0001\u0000\u0000\u0000>\u01b7\u0001\u0000\u0000\u0000"+ - "@\u01bb\u0001\u0000\u0000\u0000B\u01bd\u0001\u0000\u0000\u0000D\u01c0"+ - "\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01d1\u0001\u0000"+ - "\u0000\u0000J\u01d4\u0001\u0000\u0000\u0000L\u01d7\u0001\u0000\u0000\u0000"+ - "N\u01e0\u0001\u0000\u0000\u0000P\u01e4\u0001\u0000\u0000\u0000R\u01ea"+ - "\u0001\u0000\u0000\u0000T\u01ee\u0001\u0000\u0000\u0000V\u01f1\u0001\u0000"+ - "\u0000\u0000X\u01f9\u0001\u0000\u0000\u0000Z\u01fd\u0001\u0000\u0000\u0000"+ - "\\\u0201\u0001\u0000\u0000\u0000^\u0204\u0001\u0000\u0000\u0000`\u0209"+ - "\u0001\u0000\u0000\u0000b\u020d\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ - 
"\u0000\u0000f\u0211\u0001\u0000\u0000\u0000h\u0214\u0001\u0000\u0000\u0000"+ - "j\u0218\u0001\u0000\u0000\u0000l\u021b\u0001\u0000\u0000\u0000n\u021e"+ - "\u0001\u0000\u0000\u0000p\u0232\u0001\u0000\u0000\u0000r\u0236\u0001\u0000"+ - "\u0000\u0000t\u023b\u0001\u0000\u0000\u0000vw\u0003\u0002\u0001\u0000"+ - "wx\u0005\u0000\u0000\u0001x\u0001\u0001\u0000\u0000\u0000yz\u0006\u0001"+ - "\uffff\uffff\u0000z{\u0003\u0004\u0002\u0000{\u0081\u0001\u0000\u0000"+ - "\u0000|}\n\u0001\u0000\u0000}~\u0005\u001a\u0000\u0000~\u0080\u0003\u0006"+ - "\u0003\u0000\u007f|\u0001\u0000\u0000\u0000\u0080\u0083\u0001\u0000\u0000"+ - "\u0000\u0081\u007f\u0001\u0000\u0000\u0000\u0081\u0082\u0001\u0000\u0000"+ - "\u0000\u0082\u0003\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000"+ - "\u0000\u0084\u008c\u0003f3\u0000\u0085\u008c\u0003 \u0010\u0000\u0086"+ - "\u008c\u0003l6\u0000\u0087\u008c\u0003\u001a\r\u0000\u0088\u008c\u0003"+ - "j5\u0000\u0089\u008a\u0004\u0002\u0001\u0000\u008a\u008c\u0003.\u0017"+ - "\u0000\u008b\u0084\u0001\u0000\u0000\u0000\u008b\u0085\u0001\u0000\u0000"+ - "\u0000\u008b\u0086\u0001\u0000\u0000\u0000\u008b\u0087\u0001\u0000\u0000"+ - "\u0000\u008b\u0088\u0001\u0000\u0000\u0000\u008b\u0089\u0001\u0000\u0000"+ - "\u0000\u008c\u0005\u0001\u0000\u0000\u0000\u008d\u009e\u00030\u0018\u0000"+ - "\u008e\u009e\u0003\b\u0004\u0000\u008f\u009e\u0003H$\u0000\u0090\u009e"+ - "\u0003B!\u0000\u0091\u009e\u00032\u0019\u0000\u0092\u009e\u0003D\"\u0000"+ - "\u0093\u009e\u0003J%\u0000\u0094\u009e\u0003L&\u0000\u0095\u009e\u0003"+ - "P(\u0000\u0096\u009e\u0003R)\u0000\u0097\u009e\u0003n7\u0000\u0098\u009e"+ - "\u0003T*\u0000\u0099\u009a\u0004\u0003\u0002\u0000\u009a\u009e\u0003t"+ - ":\u0000\u009b\u009c\u0004\u0003\u0003\u0000\u009c\u009e\u0003r9\u0000"+ - "\u009d\u008d\u0001\u0000\u0000\u0000\u009d\u008e\u0001\u0000\u0000\u0000"+ - "\u009d\u008f\u0001\u0000\u0000\u0000\u009d\u0090\u0001\u0000\u0000\u0000"+ - 
"\u009d\u0091\u0001\u0000\u0000\u0000\u009d\u0092\u0001\u0000\u0000\u0000"+ - "\u009d\u0093\u0001\u0000\u0000\u0000\u009d\u0094\u0001\u0000\u0000\u0000"+ - "\u009d\u0095\u0001\u0000\u0000\u0000\u009d\u0096\u0001\u0000\u0000\u0000"+ - "\u009d\u0097\u0001\u0000\u0000\u0000\u009d\u0098\u0001\u0000\u0000\u0000"+ - "\u009d\u0099\u0001\u0000\u0000\u0000\u009d\u009b\u0001\u0000\u0000\u0000"+ - "\u009e\u0007\u0001\u0000\u0000\u0000\u009f\u00a0\u0005\u0011\u0000\u0000"+ - "\u00a0\u00a1\u0003\n\u0005\u0000\u00a1\t\u0001\u0000\u0000\u0000\u00a2"+ - "\u00a3\u0006\u0005\uffff\uffff\u0000\u00a3\u00a4\u0005-\u0000\u0000\u00a4"+ - "\u00c1\u0003\n\u0005\b\u00a5\u00c1\u0003\u0010\b\u0000\u00a6\u00c1\u0003"+ - "\f\u0006\u0000\u00a7\u00a9\u0003\u0010\b\u0000\u00a8\u00aa\u0005-\u0000"+ - "\u0000\u00a9\u00a8\u0001\u0000\u0000\u0000\u00a9\u00aa\u0001\u0000\u0000"+ - "\u0000\u00aa\u00ab\u0001\u0000\u0000\u0000\u00ab\u00ac\u0005(\u0000\u0000"+ - "\u00ac\u00ad\u0005,\u0000\u0000\u00ad\u00b2\u0003\u0010\b\u0000\u00ae"+ - "\u00af\u0005#\u0000\u0000\u00af\u00b1\u0003\u0010\b\u0000\u00b0\u00ae"+ - "\u0001\u0000\u0000\u0000\u00b1\u00b4\u0001\u0000\u0000\u0000\u00b2\u00b0"+ - "\u0001\u0000\u0000\u0000\u00b2\u00b3\u0001\u0000\u0000\u0000\u00b3\u00b5"+ - "\u0001\u0000\u0000\u0000\u00b4\u00b2\u0001\u0000\u0000\u0000\u00b5\u00b6"+ - "\u00053\u0000\u0000\u00b6\u00c1\u0001\u0000\u0000\u0000\u00b7\u00b8\u0003"+ - "\u0010\b\u0000\u00b8\u00ba\u0005)\u0000\u0000\u00b9\u00bb\u0005-\u0000"+ - "\u0000\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000"+ - "\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u0005.\u0000\u0000"+ - "\u00bd\u00c1\u0001\u0000\u0000\u0000\u00be\u00bf\u0004\u0005\u0004\u0000"+ - "\u00bf\u00c1\u0003\u000e\u0007\u0000\u00c0\u00a2\u0001\u0000\u0000\u0000"+ - "\u00c0\u00a5\u0001\u0000\u0000\u0000\u00c0\u00a6\u0001\u0000\u0000\u0000"+ - "\u00c0\u00a7\u0001\u0000\u0000\u0000\u00c0\u00b7\u0001\u0000\u0000\u0000"+ - 
"\u00c0\u00be\u0001\u0000\u0000\u0000\u00c1\u00ca\u0001\u0000\u0000\u0000"+ - "\u00c2\u00c3\n\u0005\u0000\u0000\u00c3\u00c4\u0005\u001f\u0000\u0000\u00c4"+ - "\u00c9\u0003\n\u0005\u0006\u00c5\u00c6\n\u0004\u0000\u0000\u00c6\u00c7"+ - "\u00050\u0000\u0000\u00c7\u00c9\u0003\n\u0005\u0005\u00c8\u00c2\u0001"+ - "\u0000\u0000\u0000\u00c8\u00c5\u0001\u0000\u0000\u0000\u00c9\u00cc\u0001"+ - "\u0000\u0000\u0000\u00ca\u00c8\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001"+ - "\u0000\u0000\u0000\u00cb\u000b\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001"+ - "\u0000\u0000\u0000\u00cd\u00cf\u0003\u0010\b\u0000\u00ce\u00d0\u0005-"+ - "\u0000\u0000\u00cf\u00ce\u0001\u0000\u0000\u0000\u00cf\u00d0\u0001\u0000"+ - "\u0000\u0000\u00d0\u00d1\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005+\u0000"+ - "\u0000\u00d2\u00d3\u0003b1\u0000\u00d3\u00dc\u0001\u0000\u0000\u0000\u00d4"+ - "\u00d6\u0003\u0010\b\u0000\u00d5\u00d7\u0005-\u0000\u0000\u00d6\u00d5"+ - "\u0001\u0000\u0000\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u00d8"+ - "\u0001\u0000\u0000\u0000\u00d8\u00d9\u00052\u0000\u0000\u00d9\u00da\u0003"+ - "b1\u0000\u00da\u00dc\u0001\u0000\u0000\u0000\u00db\u00cd\u0001\u0000\u0000"+ - "\u0000\u00db\u00d4\u0001\u0000\u0000\u0000\u00dc\r\u0001\u0000\u0000\u0000"+ - "\u00dd\u00de\u0003\u0010\b\u0000\u00de\u00df\u0005\u0014\u0000\u0000\u00df"+ - "\u00e0\u0003b1\u0000\u00e0\u000f\u0001\u0000\u0000\u0000\u00e1\u00e7\u0003"+ - "\u0012\t\u0000\u00e2\u00e3\u0003\u0012\t\u0000\u00e3\u00e4\u0003d2\u0000"+ - "\u00e4\u00e5\u0003\u0012\t\u0000\u00e5\u00e7\u0001\u0000\u0000\u0000\u00e6"+ - "\u00e1\u0001\u0000\u0000\u0000\u00e6\u00e2\u0001\u0000\u0000\u0000\u00e7"+ - "\u0011\u0001\u0000\u0000\u0000\u00e8\u00e9\u0006\t\uffff\uffff\u0000\u00e9"+ - "\u00ed\u0003\u0014\n\u0000\u00ea\u00eb\u0007\u0000\u0000\u0000\u00eb\u00ed"+ - "\u0003\u0012\t\u0003\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001"+ - "\u0000\u0000\u0000\u00ed\u00f6\u0001\u0000\u0000\u0000\u00ee\u00ef\n\u0002"+ - 
"\u0000\u0000\u00ef\u00f0\u0007\u0001\u0000\u0000\u00f0\u00f5\u0003\u0012"+ - "\t\u0003\u00f1\u00f2\n\u0001\u0000\u0000\u00f2\u00f3\u0007\u0000\u0000"+ - "\u0000\u00f3\u00f5\u0003\u0012\t\u0002\u00f4\u00ee\u0001\u0000\u0000\u0000"+ - "\u00f4\u00f1\u0001\u0000\u0000\u0000\u00f5\u00f8\u0001\u0000\u0000\u0000"+ - "\u00f6\u00f4\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001\u0000\u0000\u0000"+ - "\u00f7\u0013\u0001\u0000\u0000\u0000\u00f8\u00f6\u0001\u0000\u0000\u0000"+ - "\u00f9\u00fa\u0006\n\uffff\uffff\u0000\u00fa\u0102\u0003>\u001f\u0000"+ - "\u00fb\u0102\u00034\u001a\u0000\u00fc\u0102\u0003\u0016\u000b\u0000\u00fd"+ - "\u00fe\u0005,\u0000\u0000\u00fe\u00ff\u0003\n\u0005\u0000\u00ff\u0100"+ - "\u00053\u0000\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00f9\u0001"+ - "\u0000\u0000\u0000\u0101\u00fb\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ - "\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u0108\u0001"+ - "\u0000\u0000\u0000\u0103\u0104\n\u0001\u0000\u0000\u0104\u0105\u0005\""+ - "\u0000\u0000\u0105\u0107\u0003\u0018\f\u0000\u0106\u0103\u0001\u0000\u0000"+ - "\u0000\u0107\u010a\u0001\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000"+ - "\u0000\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u0015\u0001\u0000\u0000"+ - "\u0000\u010a\u0108\u0001\u0000\u0000\u0000\u010b\u010c\u0003:\u001d\u0000"+ - "\u010c\u0116\u0005,\u0000\u0000\u010d\u0117\u0005>\u0000\u0000\u010e\u0113"+ - "\u0003\n\u0005\u0000\u010f\u0110\u0005#\u0000\u0000\u0110\u0112\u0003"+ - "\n\u0005\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0115\u0001\u0000"+ - "\u0000\u0000\u0113\u0111\u0001\u0000\u0000\u0000\u0113\u0114\u0001\u0000"+ - "\u0000\u0000\u0114\u0117\u0001\u0000\u0000\u0000\u0115\u0113\u0001\u0000"+ - "\u0000\u0000\u0116\u010d\u0001\u0000\u0000\u0000\u0116\u010e\u0001\u0000"+ - "\u0000\u0000\u0116\u0117\u0001\u0000\u0000\u0000\u0117\u0118\u0001\u0000"+ - "\u0000\u0000\u0118\u0119\u00053\u0000\u0000\u0119\u0017\u0001\u0000\u0000"+ - 
"\u0000\u011a\u011b\u0003:\u001d\u0000\u011b\u0019\u0001\u0000\u0000\u0000"+ - "\u011c\u011d\u0005\r\u0000\u0000\u011d\u011e\u0003\u001c\u000e\u0000\u011e"+ - "\u001b\u0001\u0000\u0000\u0000\u011f\u0124\u0003\u001e\u000f\u0000\u0120"+ - "\u0121\u0005#\u0000\u0000\u0121\u0123\u0003\u001e\u000f\u0000\u0122\u0120"+ - "\u0001\u0000\u0000\u0000\u0123\u0126\u0001\u0000\u0000\u0000\u0124\u0122"+ - "\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u001d"+ - "\u0001\u0000\u0000\u0000\u0126\u0124\u0001\u0000\u0000\u0000\u0127\u012d"+ - "\u0003\n\u0005\u0000\u0128\u0129\u00034\u001a\u0000\u0129\u012a\u0005"+ - "!\u0000\u0000\u012a\u012b\u0003\n\u0005\u0000\u012b\u012d\u0001\u0000"+ - "\u0000\u0000\u012c\u0127\u0001\u0000\u0000\u0000\u012c\u0128\u0001\u0000"+ - "\u0000\u0000\u012d\u001f\u0001\u0000\u0000\u0000\u012e\u012f\u0005\u0006"+ - "\u0000\u0000\u012f\u0134\u0003\"\u0011\u0000\u0130\u0131\u0005#\u0000"+ - "\u0000\u0131\u0133\u0003\"\u0011\u0000\u0132\u0130\u0001\u0000\u0000\u0000"+ - "\u0133\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000"+ - "\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u0138\u0001\u0000\u0000\u0000"+ - "\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u0139\u0003(\u0014\u0000\u0138"+ - "\u0137\u0001\u0000\u0000\u0000\u0138\u0139\u0001\u0000\u0000\u0000\u0139"+ - "!\u0001\u0000\u0000\u0000\u013a\u013b\u0003$\u0012\u0000\u013b\u013c\u0005"+ - "m\u0000\u0000\u013c\u013d\u0003&\u0013\u0000\u013d\u0140\u0001\u0000\u0000"+ - "\u0000\u013e\u0140\u0003&\u0013\u0000\u013f\u013a\u0001\u0000\u0000\u0000"+ - "\u013f\u013e\u0001\u0000\u0000\u0000\u0140#\u0001\u0000\u0000\u0000\u0141"+ - "\u0142\u0005M\u0000\u0000\u0142%\u0001\u0000\u0000\u0000\u0143\u0144\u0007"+ - "\u0002\u0000\u0000\u0144\'\u0001\u0000\u0000\u0000\u0145\u0148\u0003*"+ - "\u0015\u0000\u0146\u0148\u0003,\u0016\u0000\u0147\u0145\u0001\u0000\u0000"+ - "\u0000\u0147\u0146\u0001\u0000\u0000\u0000\u0148)\u0001\u0000\u0000\u0000"+ - 
"\u0149\u014a\u0005L\u0000\u0000\u014a\u014f\u0005M\u0000\u0000\u014b\u014c"+ - "\u0005#\u0000\u0000\u014c\u014e\u0005M\u0000\u0000\u014d\u014b\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u014d\u0001\u0000"+ - "\u0000\u0000\u014f\u0150\u0001\u0000\u0000\u0000\u0150+\u0001\u0000\u0000"+ - "\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0152\u0153\u0005B\u0000\u0000"+ - "\u0153\u0154\u0003*\u0015\u0000\u0154\u0155\u0005C\u0000\u0000\u0155-"+ - "\u0001\u0000\u0000\u0000\u0156\u0157\u0005\u0015\u0000\u0000\u0157\u015c"+ - "\u0003\"\u0011\u0000\u0158\u0159\u0005#\u0000\u0000\u0159\u015b\u0003"+ - "\"\u0011\u0000\u015a\u0158\u0001\u0000\u0000\u0000\u015b\u015e\u0001\u0000"+ - "\u0000\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015c\u015d\u0001\u0000"+ - "\u0000\u0000\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000"+ - "\u0000\u0000\u015f\u0161\u0003\u001c\u000e\u0000\u0160\u015f\u0001\u0000"+ - "\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000\u0161\u0164\u0001\u0000"+ - "\u0000\u0000\u0162\u0163\u0005\u001e\u0000\u0000\u0163\u0165\u0003\u001c"+ - "\u000e\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000"+ - "\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166\u0167\u0005\u0004\u0000"+ - "\u0000\u0167\u0168\u0003\u001c\u000e\u0000\u01681\u0001\u0000\u0000\u0000"+ - "\u0169\u016b\u0005\u0010\u0000\u0000\u016a\u016c\u0003\u001c\u000e\u0000"+ - "\u016b\u016a\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000\u0000"+ - "\u016c\u016f\u0001\u0000\u0000\u0000\u016d\u016e\u0005\u001e\u0000\u0000"+ - "\u016e\u0170\u0003\u001c\u000e\u0000\u016f\u016d\u0001\u0000\u0000\u0000"+ - "\u016f\u0170\u0001\u0000\u0000\u0000\u01703\u0001\u0000\u0000\u0000\u0171"+ - "\u0176\u0003:\u001d\u0000\u0172\u0173\u0005%\u0000\u0000\u0173\u0175\u0003"+ - ":\u001d\u0000\u0174\u0172\u0001\u0000\u0000\u0000\u0175\u0178\u0001\u0000"+ - "\u0000\u0000\u0176\u0174\u0001\u0000\u0000\u0000\u0176\u0177\u0001\u0000"+ - 
"\u0000\u0000\u01775\u0001\u0000\u0000\u0000\u0178\u0176\u0001\u0000\u0000"+ - "\u0000\u0179\u017e\u0003<\u001e\u0000\u017a\u017b\u0005%\u0000\u0000\u017b"+ - "\u017d\u0003<\u001e\u0000\u017c\u017a\u0001\u0000\u0000\u0000\u017d\u0180"+ - "\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017e\u017f"+ - "\u0001\u0000\u0000\u0000\u017f7\u0001\u0000\u0000\u0000\u0180\u017e\u0001"+ - "\u0000\u0000\u0000\u0181\u0186\u00036\u001b\u0000\u0182\u0183\u0005#\u0000"+ - "\u0000\u0183\u0185\u00036\u001b\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ - "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ - "\u0186\u0187\u0001\u0000\u0000\u0000\u01879\u0001\u0000\u0000\u0000\u0188"+ - "\u0186\u0001\u0000\u0000\u0000\u0189\u018a\u0007\u0003\u0000\u0000\u018a"+ - ";\u0001\u0000\u0000\u0000\u018b\u018c\u0005Q\u0000\u0000\u018c=\u0001"+ - "\u0000\u0000\u0000\u018d\u01b8\u0005.\u0000\u0000\u018e\u018f\u0003`0"+ - "\u0000\u018f\u0190\u0005D\u0000\u0000\u0190\u01b8\u0001\u0000\u0000\u0000"+ - "\u0191\u01b8\u0003^/\u0000\u0192\u01b8\u0003`0\u0000\u0193\u01b8\u0003"+ - "Z-\u0000\u0194\u01b8\u0003@ \u0000\u0195\u01b8\u0003b1\u0000\u0196\u0197"+ - "\u0005B\u0000\u0000\u0197\u019c\u0003\\.\u0000\u0198\u0199\u0005#\u0000"+ - "\u0000\u0199\u019b\u0003\\.\u0000\u019a\u0198\u0001\u0000\u0000\u0000"+ - "\u019b\u019e\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000"+ - "\u019c\u019d\u0001\u0000\u0000\u0000\u019d\u019f\u0001\u0000\u0000\u0000"+ - "\u019e\u019c\u0001\u0000\u0000\u0000\u019f\u01a0\u0005C\u0000\u0000\u01a0"+ - "\u01b8\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005B\u0000\u0000\u01a2\u01a7"+ - "\u0003Z-\u0000\u01a3\u01a4\u0005#\u0000\u0000\u01a4\u01a6\u0003Z-\u0000"+ - "\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a9\u0001\u0000\u0000\u0000"+ - "\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000"+ - "\u01a8\u01aa\u0001\u0000\u0000\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000"+ - 
"\u01aa\u01ab\u0005C\u0000\u0000\u01ab\u01b8\u0001\u0000\u0000\u0000\u01ac"+ - "\u01ad\u0005B\u0000\u0000\u01ad\u01b2\u0003b1\u0000\u01ae\u01af\u0005"+ - "#\u0000\u0000\u01af\u01b1\u0003b1\u0000\u01b0\u01ae\u0001\u0000\u0000"+ - "\u0000\u01b1\u01b4\u0001\u0000\u0000\u0000\u01b2\u01b0\u0001\u0000\u0000"+ - "\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b5\u0001\u0000\u0000"+ - "\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005C\u0000\u0000"+ - "\u01b6\u01b8\u0001\u0000\u0000\u0000\u01b7\u018d\u0001\u0000\u0000\u0000"+ - "\u01b7\u018e\u0001\u0000\u0000\u0000\u01b7\u0191\u0001\u0000\u0000\u0000"+ - "\u01b7\u0192\u0001\u0000\u0000\u0000\u01b7\u0193\u0001\u0000\u0000\u0000"+ - "\u01b7\u0194\u0001\u0000\u0000\u0000\u01b7\u0195\u0001\u0000\u0000\u0000"+ - "\u01b7\u0196\u0001\u0000\u0000\u0000\u01b7\u01a1\u0001\u0000\u0000\u0000"+ - "\u01b7\u01ac\u0001\u0000\u0000\u0000\u01b8?\u0001\u0000\u0000\u0000\u01b9"+ - "\u01bc\u00051\u0000\u0000\u01ba\u01bc\u0005A\u0000\u0000\u01bb\u01b9\u0001"+ - "\u0000\u0000\u0000\u01bb\u01ba\u0001\u0000\u0000\u0000\u01bcA\u0001\u0000"+ - "\u0000\u0000\u01bd\u01be\u0005\t\u0000\u0000\u01be\u01bf\u0005\u001c\u0000"+ - "\u0000\u01bfC\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\u000f\u0000\u0000"+ - "\u01c1\u01c6\u0003F#\u0000\u01c2\u01c3\u0005#\u0000\u0000\u01c3\u01c5"+ - "\u0003F#\u0000\u01c4\u01c2\u0001\u0000\u0000\u0000\u01c5\u01c8\u0001\u0000"+ - "\u0000\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000"+ - "\u0000\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000"+ - "\u0000\u01c9\u01cb\u0003\n\u0005\u0000\u01ca\u01cc\u0007\u0004\u0000\u0000"+ - "\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000"+ - "\u01cc\u01cf\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005/\u0000\u0000\u01ce"+ - "\u01d0\u0007\u0005\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf"+ - "\u01d0\u0001\u0000\u0000\u0000\u01d0G\u0001\u0000\u0000\u0000\u01d1\u01d2"+ - 
"\u0005\b\u0000\u0000\u01d2\u01d3\u00038\u001c\u0000\u01d3I\u0001\u0000"+ - "\u0000\u0000\u01d4\u01d5\u0005\u0002\u0000\u0000\u01d5\u01d6\u00038\u001c"+ - "\u0000\u01d6K\u0001\u0000\u0000\u0000\u01d7\u01d8\u0005\f\u0000\u0000"+ - "\u01d8\u01dd\u0003N\'\u0000\u01d9\u01da\u0005#\u0000\u0000\u01da\u01dc"+ - "\u0003N\'\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc\u01df\u0001"+ - "\u0000\u0000\u0000\u01dd\u01db\u0001\u0000\u0000\u0000\u01dd\u01de\u0001"+ - "\u0000\u0000\u0000\u01deM\u0001\u0000\u0000\u0000\u01df\u01dd\u0001\u0000"+ - "\u0000\u0000\u01e0\u01e1\u00036\u001b\u0000\u01e1\u01e2\u0005U\u0000\u0000"+ - "\u01e2\u01e3\u00036\u001b\u0000\u01e3O\u0001\u0000\u0000\u0000\u01e4\u01e5"+ - "\u0005\u0001\u0000\u0000\u01e5\u01e6\u0003\u0014\n\u0000\u01e6\u01e8\u0003"+ - "b1\u0000\u01e7\u01e9\u0003V+\u0000\u01e8\u01e7\u0001\u0000\u0000\u0000"+ - "\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9Q\u0001\u0000\u0000\u0000\u01ea"+ - "\u01eb\u0005\u0007\u0000\u0000\u01eb\u01ec\u0003\u0014\n\u0000\u01ec\u01ed"+ - "\u0003b1\u0000\u01edS\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u000b"+ - "\u0000\u0000\u01ef\u01f0\u00034\u001a\u0000\u01f0U\u0001\u0000\u0000\u0000"+ - "\u01f1\u01f6\u0003X,\u0000\u01f2\u01f3\u0005#\u0000\u0000\u01f3\u01f5"+ - "\u0003X,\u0000\u01f4\u01f2\u0001\u0000\u0000\u0000\u01f5\u01f8\u0001\u0000"+ - "\u0000\u0000\u01f6\u01f4\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000"+ - "\u0000\u0000\u01f7W\u0001\u0000\u0000\u0000\u01f8\u01f6\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0003:\u001d\u0000\u01fa\u01fb\u0005!\u0000\u0000\u01fb"+ - "\u01fc\u0003>\u001f\u0000\u01fcY\u0001\u0000\u0000\u0000\u01fd\u01fe\u0007"+ - "\u0006\u0000\u0000\u01fe[\u0001\u0000\u0000\u0000\u01ff\u0202\u0003^/"+ - "\u0000\u0200\u0202\u0003`0\u0000\u0201\u01ff\u0001\u0000\u0000\u0000\u0201"+ - "\u0200\u0001\u0000\u0000\u0000\u0202]\u0001\u0000\u0000\u0000\u0203\u0205"+ - "\u0007\u0000\u0000\u0000\u0204\u0203\u0001\u0000\u0000\u0000\u0204\u0205"+ - 
"\u0001\u0000\u0000\u0000\u0205\u0206\u0001\u0000\u0000\u0000\u0206\u0207"+ - "\u0005\u001d\u0000\u0000\u0207_\u0001\u0000\u0000\u0000\u0208\u020a\u0007"+ - "\u0000\u0000\u0000\u0209\u0208\u0001\u0000\u0000\u0000\u0209\u020a\u0001"+ - "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0005"+ - "\u001c\u0000\u0000\u020ca\u0001\u0000\u0000\u0000\u020d\u020e\u0005\u001b"+ - "\u0000\u0000\u020ec\u0001\u0000\u0000\u0000\u020f\u0210\u0007\u0007\u0000"+ - "\u0000\u0210e\u0001\u0000\u0000\u0000\u0211\u0212\u0005\u0005\u0000\u0000"+ - "\u0212\u0213\u0003h4\u0000\u0213g\u0001\u0000\u0000\u0000\u0214\u0215"+ - "\u0005B\u0000\u0000\u0215\u0216\u0003\u0002\u0001\u0000\u0216\u0217\u0005"+ - "C\u0000\u0000\u0217i\u0001\u0000\u0000\u0000\u0218\u0219\u0005\u000e\u0000"+ - "\u0000\u0219\u021a\u0005e\u0000\u0000\u021ak\u0001\u0000\u0000\u0000\u021b"+ - "\u021c\u0005\n\u0000\u0000\u021c\u021d\u0005i\u0000\u0000\u021dm\u0001"+ - "\u0000\u0000\u0000\u021e\u021f\u0005\u0003\u0000\u0000\u021f\u0222\u0005"+ - "[\u0000\u0000\u0220\u0221\u0005Y\u0000\u0000\u0221\u0223\u00036\u001b"+ - "\u0000\u0222\u0220\u0001\u0000\u0000\u0000\u0222\u0223\u0001\u0000\u0000"+ - "\u0000\u0223\u022d\u0001\u0000\u0000\u0000\u0224\u0225\u0005Z\u0000\u0000"+ - "\u0225\u022a\u0003p8\u0000\u0226\u0227\u0005#\u0000\u0000\u0227\u0229"+ - "\u0003p8\u0000\u0228\u0226\u0001\u0000\u0000\u0000\u0229\u022c\u0001\u0000"+ - "\u0000\u0000\u022a\u0228\u0001\u0000\u0000\u0000\u022a\u022b\u0001\u0000"+ - "\u0000\u0000\u022b\u022e\u0001\u0000\u0000\u0000\u022c\u022a\u0001\u0000"+ - "\u0000\u0000\u022d\u0224\u0001\u0000\u0000\u0000\u022d\u022e\u0001\u0000"+ - "\u0000\u0000\u022eo\u0001\u0000\u0000\u0000\u022f\u0230\u00036\u001b\u0000"+ - "\u0230\u0231\u0005!\u0000\u0000\u0231\u0233\u0001\u0000\u0000\u0000\u0232"+ - "\u022f\u0001\u0000\u0000\u0000\u0232\u0233\u0001\u0000\u0000\u0000\u0233"+ - "\u0234\u0001\u0000\u0000\u0000\u0234\u0235\u00036\u001b\u0000\u0235q\u0001"+ - 
"\u0000\u0000\u0000\u0236\u0237\u0005\u0013\u0000\u0000\u0237\u0238\u0003"+ - "\"\u0011\u0000\u0238\u0239\u0005Y\u0000\u0000\u0239\u023a\u00038\u001c"+ - "\u0000\u023as\u0001\u0000\u0000\u0000\u023b\u023c\u0005\u0012\u0000\u0000"+ - "\u023c\u023f\u0003\u001c\u000e\u0000\u023d\u023e\u0005\u001e\u0000\u0000"+ - "\u023e\u0240\u0003\u001c\u000e\u0000\u023f\u023d\u0001\u0000\u0000\u0000"+ - "\u023f\u0240\u0001\u0000\u0000\u0000\u0240u\u0001\u0000\u0000\u00006\u0081"+ - "\u008b\u009d\u00a9\u00b2\u00ba\u00c0\u00c8\u00ca\u00cf\u00d6\u00db\u00e6"+ - "\u00ec\u00f4\u00f6\u0101\u0108\u0113\u0116\u0124\u012c\u0134\u0138\u013f"+ - "\u0147\u014f\u015c\u0160\u0164\u016b\u016f\u0176\u017e\u0186\u019c\u01a7"+ - "\u01b2\u01b7\u01bb\u01c6\u01cb\u01cf\u01dd\u01e8\u01f6\u0201\u0204\u0209"+ - "\u0222\u022a\u022d\u0232\u023f"; + "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u0198\b\u001f"+ + "\n\u001f\f\u001f\u019b\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a3\b\u001f\n\u001f\f\u001f"+ + "\u01a6\t\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001\u001f\u0005\u001f\u01ae\b\u001f\n\u001f\f\u001f\u01b1\t\u001f\u0001"+ + "\u001f\u0001\u001f\u0003\u001f\u01b5\b\u001f\u0001 \u0001 \u0003 \u01b9"+ + "\b \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c2"+ + "\b\"\n\"\f\"\u01c5\t\"\u0001#\u0001#\u0003#\u01c9\b#\u0001#\u0001#\u0003"+ + "#\u01cd\b#\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001"+ + "&\u0001&\u0005&\u01d9\b&\n&\f&\u01dc\t&\u0001\'\u0001\'\u0001\'\u0001"+ + "\'\u0001(\u0001(\u0001(\u0001(\u0003(\u01e6\b(\u0001)\u0001)\u0001)\u0001"+ + ")\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0005+\u01f2\b+\n+\f+\u01f5"+ + "\t+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0003.\u01ff"+ + "\b.\u0001/\u0003/\u0202\b/\u0001/\u0001/\u00010\u00030\u0207\b0\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00014\u0001"+ + 
"4\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u0003"+ + "6\u021d\b6\u00016\u00016\u00016\u00016\u00056\u0223\b6\n6\f6\u0226\t6"+ + "\u00036\u0228\b6\u00017\u00017\u00017\u00037\u022d\b7\u00017\u00017\u0001"+ + "8\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u00039\u023a"+ + "\b9\u00019\u0000\u0004\u0002\n\u0012\u0014:\u0000\u0002\u0004\u0006\b"+ + "\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02"+ + "468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnpr\u0000\b\u0001\u0000;<\u0001\u0000=?"+ + "\u0002\u0000\u001a\u001aLL\u0001\u0000CD\u0002\u0000\u001f\u001f##\u0002"+ + "\u0000&&))\u0002\u0000%%33\u0002\u0000446:\u0255\u0000t\u0001\u0000\u0000"+ + "\u0000\u0002w\u0001\u0000\u0000\u0000\u0004\u0088\u0001\u0000\u0000\u0000"+ + "\u0006\u009a\u0001\u0000\u0000\u0000\b\u009c\u0001\u0000\u0000\u0000\n"+ + "\u00bd\u0001\u0000\u0000\u0000\f\u00d8\u0001\u0000\u0000\u0000\u000e\u00da"+ + "\u0001\u0000\u0000\u0000\u0010\u00e3\u0001\u0000\u0000\u0000\u0012\u00e9"+ + "\u0001\u0000\u0000\u0000\u0014\u00fe\u0001\u0000\u0000\u0000\u0016\u0108"+ + "\u0001\u0000\u0000\u0000\u0018\u0117\u0001\u0000\u0000\u0000\u001a\u0119"+ + "\u0001\u0000\u0000\u0000\u001c\u011c\u0001\u0000\u0000\u0000\u001e\u0129"+ + "\u0001\u0000\u0000\u0000 \u012b\u0001\u0000\u0000\u0000\"\u013c\u0001"+ + "\u0000\u0000\u0000$\u013e\u0001\u0000\u0000\u0000&\u0140\u0001\u0000\u0000"+ + "\u0000(\u0144\u0001\u0000\u0000\u0000*\u0146\u0001\u0000\u0000\u0000,"+ + "\u014f\u0001\u0000\u0000\u0000.\u0153\u0001\u0000\u0000\u00000\u0163\u0001"+ + "\u0000\u0000\u00002\u0166\u0001\u0000\u0000\u00004\u016e\u0001\u0000\u0000"+ + "\u00006\u0176\u0001\u0000\u0000\u00008\u017e\u0001\u0000\u0000\u0000:"+ + "\u0186\u0001\u0000\u0000\u0000<\u0188\u0001\u0000\u0000\u0000>\u01b4\u0001"+ + "\u0000\u0000\u0000@\u01b8\u0001\u0000\u0000\u0000B\u01ba\u0001\u0000\u0000"+ + "\u0000D\u01bd\u0001\u0000\u0000\u0000F\u01c6\u0001\u0000\u0000\u0000H"+ + 
"\u01ce\u0001\u0000\u0000\u0000J\u01d1\u0001\u0000\u0000\u0000L\u01d4\u0001"+ + "\u0000\u0000\u0000N\u01dd\u0001\u0000\u0000\u0000P\u01e1\u0001\u0000\u0000"+ + "\u0000R\u01e7\u0001\u0000\u0000\u0000T\u01eb\u0001\u0000\u0000\u0000V"+ + "\u01ee\u0001\u0000\u0000\u0000X\u01f6\u0001\u0000\u0000\u0000Z\u01fa\u0001"+ + "\u0000\u0000\u0000\\\u01fe\u0001\u0000\u0000\u0000^\u0201\u0001\u0000"+ + "\u0000\u0000`\u0206\u0001\u0000\u0000\u0000b\u020a\u0001\u0000\u0000\u0000"+ + "d\u020c\u0001\u0000\u0000\u0000f\u020e\u0001\u0000\u0000\u0000h\u0211"+ + "\u0001\u0000\u0000\u0000j\u0215\u0001\u0000\u0000\u0000l\u0218\u0001\u0000"+ + "\u0000\u0000n\u022c\u0001\u0000\u0000\u0000p\u0230\u0001\u0000\u0000\u0000"+ + "r\u0235\u0001\u0000\u0000\u0000tu\u0003\u0002\u0001\u0000uv\u0005\u0000"+ + "\u0000\u0001v\u0001\u0001\u0000\u0000\u0000wx\u0006\u0001\uffff\uffff"+ + "\u0000xy\u0003\u0004\u0002\u0000y\u007f\u0001\u0000\u0000\u0000z{\n\u0001"+ + "\u0000\u0000{|\u0005\u0019\u0000\u0000|~\u0003\u0006\u0003\u0000}z\u0001"+ + "\u0000\u0000\u0000~\u0081\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000"+ + "\u0000\u007f\u0080\u0001\u0000\u0000\u0000\u0080\u0003\u0001\u0000\u0000"+ + "\u0000\u0081\u007f\u0001\u0000\u0000\u0000\u0082\u0089\u0003f3\u0000\u0083"+ + "\u0089\u0003 \u0010\u0000\u0084\u0089\u0003\u001a\r\u0000\u0085\u0089"+ + "\u0003j5\u0000\u0086\u0087\u0004\u0002\u0001\u0000\u0087\u0089\u0003."+ + "\u0017\u0000\u0088\u0082\u0001\u0000\u0000\u0000\u0088\u0083\u0001\u0000"+ + "\u0000\u0000\u0088\u0084\u0001\u0000\u0000\u0000\u0088\u0085\u0001\u0000"+ + "\u0000\u0000\u0088\u0086\u0001\u0000\u0000\u0000\u0089\u0005\u0001\u0000"+ + "\u0000\u0000\u008a\u009b\u00030\u0018\u0000\u008b\u009b\u0003\b\u0004"+ + "\u0000\u008c\u009b\u0003H$\u0000\u008d\u009b\u0003B!\u0000\u008e\u009b"+ + "\u00032\u0019\u0000\u008f\u009b\u0003D\"\u0000\u0090\u009b\u0003J%\u0000"+ + "\u0091\u009b\u0003L&\u0000\u0092\u009b\u0003P(\u0000\u0093\u009b\u0003"+ + 
"R)\u0000\u0094\u009b\u0003l6\u0000\u0095\u009b\u0003T*\u0000\u0096\u0097"+ + "\u0004\u0003\u0002\u0000\u0097\u009b\u0003r9\u0000\u0098\u0099\u0004\u0003"+ + "\u0003\u0000\u0099\u009b\u0003p8\u0000\u009a\u008a\u0001\u0000\u0000\u0000"+ + "\u009a\u008b\u0001\u0000\u0000\u0000\u009a\u008c\u0001\u0000\u0000\u0000"+ + "\u009a\u008d\u0001\u0000\u0000\u0000\u009a\u008e\u0001\u0000\u0000\u0000"+ + "\u009a\u008f\u0001\u0000\u0000\u0000\u009a\u0090\u0001\u0000\u0000\u0000"+ + "\u009a\u0091\u0001\u0000\u0000\u0000\u009a\u0092\u0001\u0000\u0000\u0000"+ + "\u009a\u0093\u0001\u0000\u0000\u0000\u009a\u0094\u0001\u0000\u0000\u0000"+ + "\u009a\u0095\u0001\u0000\u0000\u0000\u009a\u0096\u0001\u0000\u0000\u0000"+ + "\u009a\u0098\u0001\u0000\u0000\u0000\u009b\u0007\u0001\u0000\u0000\u0000"+ + "\u009c\u009d\u0005\u0010\u0000\u0000\u009d\u009e\u0003\n\u0005\u0000\u009e"+ + "\t\u0001\u0000\u0000\u0000\u009f\u00a0\u0006\u0005\uffff\uffff\u0000\u00a0"+ + "\u00a1\u0005,\u0000\u0000\u00a1\u00be\u0003\n\u0005\b\u00a2\u00be\u0003"+ + "\u0010\b\u0000\u00a3\u00be\u0003\f\u0006\u0000\u00a4\u00a6\u0003\u0010"+ + "\b\u0000\u00a5\u00a7\u0005,\u0000\u0000\u00a6\u00a5\u0001\u0000\u0000"+ + "\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a8\u0001\u0000\u0000"+ + "\u0000\u00a8\u00a9\u0005\'\u0000\u0000\u00a9\u00aa\u0005+\u0000\u0000"+ + "\u00aa\u00af\u0003\u0010\b\u0000\u00ab\u00ac\u0005\"\u0000\u0000\u00ac"+ + "\u00ae\u0003\u0010\b\u0000\u00ad\u00ab\u0001\u0000\u0000\u0000\u00ae\u00b1"+ + "\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00af\u00b0"+ + "\u0001\u0000\u0000\u0000\u00b0\u00b2\u0001\u0000\u0000\u0000\u00b1\u00af"+ + "\u0001\u0000\u0000\u0000\u00b2\u00b3\u00052\u0000\u0000\u00b3\u00be\u0001"+ + "\u0000\u0000\u0000\u00b4\u00b5\u0003\u0010\b\u0000\u00b5\u00b7\u0005("+ + "\u0000\u0000\u00b6\u00b8\u0005,\u0000\u0000\u00b7\u00b6\u0001\u0000\u0000"+ + "\u0000\u00b7\u00b8\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001\u0000\u0000"+ + 
"\u0000\u00b9\u00ba\u0005-\u0000\u0000\u00ba\u00be\u0001\u0000\u0000\u0000"+ + "\u00bb\u00bc\u0004\u0005\u0004\u0000\u00bc\u00be\u0003\u000e\u0007\u0000"+ + "\u00bd\u009f\u0001\u0000\u0000\u0000\u00bd\u00a2\u0001\u0000\u0000\u0000"+ + "\u00bd\u00a3\u0001\u0000\u0000\u0000\u00bd\u00a4\u0001\u0000\u0000\u0000"+ + "\u00bd\u00b4\u0001\u0000\u0000\u0000\u00bd\u00bb\u0001\u0000\u0000\u0000"+ + "\u00be\u00c7\u0001\u0000\u0000\u0000\u00bf\u00c0\n\u0005\u0000\u0000\u00c0"+ + "\u00c1\u0005\u001e\u0000\u0000\u00c1\u00c6\u0003\n\u0005\u0006\u00c2\u00c3"+ + "\n\u0004\u0000\u0000\u00c3\u00c4\u0005/\u0000\u0000\u00c4\u00c6\u0003"+ + "\n\u0005\u0005\u00c5\u00bf\u0001\u0000\u0000\u0000\u00c5\u00c2\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c9\u0001\u0000\u0000\u0000\u00c7\u00c5\u0001\u0000"+ + "\u0000\u0000\u00c7\u00c8\u0001\u0000\u0000\u0000\u00c8\u000b\u0001\u0000"+ + "\u0000\u0000\u00c9\u00c7\u0001\u0000\u0000\u0000\u00ca\u00cc\u0003\u0010"+ + "\b\u0000\u00cb\u00cd\u0005,\u0000\u0000\u00cc\u00cb\u0001\u0000\u0000"+ + "\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce\u0001\u0000\u0000"+ + "\u0000\u00ce\u00cf\u0005*\u0000\u0000\u00cf\u00d0\u0003b1\u0000\u00d0"+ + "\u00d9\u0001\u0000\u0000\u0000\u00d1\u00d3\u0003\u0010\b\u0000\u00d2\u00d4"+ + "\u0005,\u0000\u0000\u00d3\u00d2\u0001\u0000\u0000\u0000\u00d3\u00d4\u0001"+ + "\u0000\u0000\u0000\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u00d6\u0005"+ + "1\u0000\u0000\u00d6\u00d7\u0003b1\u0000\u00d7\u00d9\u0001\u0000\u0000"+ + "\u0000\u00d8\u00ca\u0001\u0000\u0000\u0000\u00d8\u00d1\u0001\u0000\u0000"+ + "\u0000\u00d9\r\u0001\u0000\u0000\u0000\u00da\u00db\u0003\u0010\b\u0000"+ + "\u00db\u00dc\u0005\u0013\u0000\u0000\u00dc\u00dd\u0003b1\u0000\u00dd\u000f"+ + "\u0001\u0000\u0000\u0000\u00de\u00e4\u0003\u0012\t\u0000\u00df\u00e0\u0003"+ + "\u0012\t\u0000\u00e0\u00e1\u0003d2\u0000\u00e1\u00e2\u0003\u0012\t\u0000"+ + "\u00e2\u00e4\u0001\u0000\u0000\u0000\u00e3\u00de\u0001\u0000\u0000\u0000"+ + 
"\u00e3\u00df\u0001\u0000\u0000\u0000\u00e4\u0011\u0001\u0000\u0000\u0000"+ + "\u00e5\u00e6\u0006\t\uffff\uffff\u0000\u00e6\u00ea\u0003\u0014\n\u0000"+ + "\u00e7\u00e8\u0007\u0000\u0000\u0000\u00e8\u00ea\u0003\u0012\t\u0003\u00e9"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e9\u00e7\u0001\u0000\u0000\u0000\u00ea"+ + "\u00f3\u0001\u0000\u0000\u0000\u00eb\u00ec\n\u0002\u0000\u0000\u00ec\u00ed"+ + "\u0007\u0001\u0000\u0000\u00ed\u00f2\u0003\u0012\t\u0003\u00ee\u00ef\n"+ + "\u0001\u0000\u0000\u00ef\u00f0\u0007\u0000\u0000\u0000\u00f0\u00f2\u0003"+ + "\u0012\t\u0002\u00f1\u00eb\u0001\u0000\u0000\u0000\u00f1\u00ee\u0001\u0000"+ + "\u0000\u0000\u00f2\u00f5\u0001\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000"+ + "\u0000\u0000\u00f3\u00f4\u0001\u0000\u0000\u0000\u00f4\u0013\u0001\u0000"+ + "\u0000\u0000\u00f5\u00f3\u0001\u0000\u0000\u0000\u00f6\u00f7\u0006\n\uffff"+ + "\uffff\u0000\u00f7\u00ff\u0003>\u001f\u0000\u00f8\u00ff\u00034\u001a\u0000"+ + "\u00f9\u00ff\u0003\u0016\u000b\u0000\u00fa\u00fb\u0005+\u0000\u0000\u00fb"+ + "\u00fc\u0003\n\u0005\u0000\u00fc\u00fd\u00052\u0000\u0000\u00fd\u00ff"+ + "\u0001\u0000\u0000\u0000\u00fe\u00f6\u0001\u0000\u0000\u0000\u00fe\u00f8"+ + "\u0001\u0000\u0000\u0000\u00fe\u00f9\u0001\u0000\u0000\u0000\u00fe\u00fa"+ + "\u0001\u0000\u0000\u0000\u00ff\u0105\u0001\u0000\u0000\u0000\u0100\u0101"+ + "\n\u0001\u0000\u0000\u0101\u0102\u0005!\u0000\u0000\u0102\u0104\u0003"+ + "\u0018\f\u0000\u0103\u0100\u0001\u0000\u0000\u0000\u0104\u0107\u0001\u0000"+ + "\u0000\u0000\u0105\u0103\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000"+ + "\u0000\u0000\u0106\u0015\u0001\u0000\u0000\u0000\u0107\u0105\u0001\u0000"+ + "\u0000\u0000\u0108\u0109\u0003:\u001d\u0000\u0109\u0113\u0005+\u0000\u0000"+ + "\u010a\u0114\u0005=\u0000\u0000\u010b\u0110\u0003\n\u0005\u0000\u010c"+ + "\u010d\u0005\"\u0000\u0000\u010d\u010f\u0003\n\u0005\u0000\u010e\u010c"+ + "\u0001\u0000\u0000\u0000\u010f\u0112\u0001\u0000\u0000\u0000\u0110\u010e"+ + 
"\u0001\u0000\u0000\u0000\u0110\u0111\u0001\u0000\u0000\u0000\u0111\u0114"+ + "\u0001\u0000\u0000\u0000\u0112\u0110\u0001\u0000\u0000\u0000\u0113\u010a"+ + "\u0001\u0000\u0000\u0000\u0113\u010b\u0001\u0000\u0000\u0000\u0113\u0114"+ + "\u0001\u0000\u0000\u0000\u0114\u0115\u0001\u0000\u0000\u0000\u0115\u0116"+ + "\u00052\u0000\u0000\u0116\u0017\u0001\u0000\u0000\u0000\u0117\u0118\u0003"+ + ":\u001d\u0000\u0118\u0019\u0001\u0000\u0000\u0000\u0119\u011a\u0005\f"+ + "\u0000\u0000\u011a\u011b\u0003\u001c\u000e\u0000\u011b\u001b\u0001\u0000"+ + "\u0000\u0000\u011c\u0121\u0003\u001e\u000f\u0000\u011d\u011e\u0005\"\u0000"+ + "\u0000\u011e\u0120\u0003\u001e\u000f\u0000\u011f\u011d\u0001\u0000\u0000"+ + "\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121\u011f\u0001\u0000\u0000"+ + "\u0000\u0121\u0122\u0001\u0000\u0000\u0000\u0122\u001d\u0001\u0000\u0000"+ + "\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124\u012a\u0003\n\u0005\u0000"+ + "\u0125\u0126\u00034\u001a\u0000\u0126\u0127\u0005 \u0000\u0000\u0127\u0128"+ + "\u0003\n\u0005\u0000\u0128\u012a\u0001\u0000\u0000\u0000\u0129\u0124\u0001"+ + "\u0000\u0000\u0000\u0129\u0125\u0001\u0000\u0000\u0000\u012a\u001f\u0001"+ + "\u0000\u0000\u0000\u012b\u012c\u0005\u0006\u0000\u0000\u012c\u0131\u0003"+ + "\"\u0011\u0000\u012d\u012e\u0005\"\u0000\u0000\u012e\u0130\u0003\"\u0011"+ + "\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0133\u0001\u0000\u0000"+ + "\u0000\u0131\u012f\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000"+ + "\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000"+ + "\u0000\u0134\u0136\u0003(\u0014\u0000\u0135\u0134\u0001\u0000\u0000\u0000"+ + "\u0135\u0136\u0001\u0000\u0000\u0000\u0136!\u0001\u0000\u0000\u0000\u0137"+ + "\u0138\u0003$\u0012\u0000\u0138\u0139\u0005h\u0000\u0000\u0139\u013a\u0003"+ + "&\u0013\u0000\u013a\u013d\u0001\u0000\u0000\u0000\u013b\u013d\u0003&\u0013"+ + "\u0000\u013c\u0137\u0001\u0000\u0000\u0000\u013c\u013b\u0001\u0000\u0000"+ + 
"\u0000\u013d#\u0001\u0000\u0000\u0000\u013e\u013f\u0005L\u0000\u0000\u013f"+ + "%\u0001\u0000\u0000\u0000\u0140\u0141\u0007\u0002\u0000\u0000\u0141\'"+ + "\u0001\u0000\u0000\u0000\u0142\u0145\u0003*\u0015\u0000\u0143\u0145\u0003"+ + ",\u0016\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0143\u0001\u0000"+ + "\u0000\u0000\u0145)\u0001\u0000\u0000\u0000\u0146\u0147\u0005K\u0000\u0000"+ + "\u0147\u014c\u0005L\u0000\u0000\u0148\u0149\u0005\"\u0000\u0000\u0149"+ + "\u014b\u0005L\u0000\u0000\u014a\u0148\u0001\u0000\u0000\u0000\u014b\u014e"+ + "\u0001\u0000\u0000\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014c\u014d"+ + "\u0001\u0000\u0000\u0000\u014d+\u0001\u0000\u0000\u0000\u014e\u014c\u0001"+ + "\u0000\u0000\u0000\u014f\u0150\u0005A\u0000\u0000\u0150\u0151\u0003*\u0015"+ + "\u0000\u0151\u0152\u0005B\u0000\u0000\u0152-\u0001\u0000\u0000\u0000\u0153"+ + "\u0154\u0005\u0014\u0000\u0000\u0154\u0159\u0003\"\u0011\u0000\u0155\u0156"+ + "\u0005\"\u0000\u0000\u0156\u0158\u0003\"\u0011\u0000\u0157\u0155\u0001"+ + "\u0000\u0000\u0000\u0158\u015b\u0001\u0000\u0000\u0000\u0159\u0157\u0001"+ + "\u0000\u0000\u0000\u0159\u015a\u0001\u0000\u0000\u0000\u015a\u015d\u0001"+ + "\u0000\u0000\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015c\u015e\u0003"+ + "\u001c\u000e\u0000\u015d\u015c\u0001\u0000\u0000\u0000\u015d\u015e\u0001"+ + "\u0000\u0000\u0000\u015e\u0161\u0001\u0000\u0000\u0000\u015f\u0160\u0005"+ + "\u001d\u0000\u0000\u0160\u0162\u0003\u001c\u000e\u0000\u0161\u015f\u0001"+ + "\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162/\u0001\u0000"+ + "\u0000\u0000\u0163\u0164\u0005\u0004\u0000\u0000\u0164\u0165\u0003\u001c"+ + "\u000e\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0168\u0005\u000f\u0000"+ + "\u0000\u0167\u0169\u0003\u001c\u000e\u0000\u0168\u0167\u0001\u0000\u0000"+ + "\u0000\u0168\u0169\u0001\u0000\u0000\u0000\u0169\u016c\u0001\u0000\u0000"+ + "\u0000\u016a\u016b\u0005\u001d\u0000\u0000\u016b\u016d\u0003\u001c\u000e"+ + 
"\u0000\u016c\u016a\u0001\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000"+ + "\u0000\u016d3\u0001\u0000\u0000\u0000\u016e\u0173\u0003:\u001d\u0000\u016f"+ + "\u0170\u0005$\u0000\u0000\u0170\u0172\u0003:\u001d\u0000\u0171\u016f\u0001"+ + "\u0000\u0000\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0171\u0001"+ + "\u0000\u0000\u0000\u0173\u0174\u0001\u0000\u0000\u0000\u01745\u0001\u0000"+ + "\u0000\u0000\u0175\u0173\u0001\u0000\u0000\u0000\u0176\u017b\u0003<\u001e"+ + "\u0000\u0177\u0178\u0005$\u0000\u0000\u0178\u017a\u0003<\u001e\u0000\u0179"+ + "\u0177\u0001\u0000\u0000\u0000\u017a\u017d\u0001\u0000\u0000\u0000\u017b"+ + "\u0179\u0001\u0000\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c"+ + "7\u0001\u0000\u0000\u0000\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0183"+ + "\u00036\u001b\u0000\u017f\u0180\u0005\"\u0000\u0000\u0180\u0182\u0003"+ + "6\u001b\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182\u0185\u0001\u0000"+ + "\u0000\u0000\u0183\u0181\u0001\u0000\u0000\u0000\u0183\u0184\u0001\u0000"+ + "\u0000\u0000\u01849\u0001\u0000\u0000\u0000\u0185\u0183\u0001\u0000\u0000"+ + "\u0000\u0186\u0187\u0007\u0003\u0000\u0000\u0187;\u0001\u0000\u0000\u0000"+ + "\u0188\u0189\u0005P\u0000\u0000\u0189=\u0001\u0000\u0000\u0000\u018a\u01b5"+ + "\u0005-\u0000\u0000\u018b\u018c\u0003`0\u0000\u018c\u018d\u0005C\u0000"+ + "\u0000\u018d\u01b5\u0001\u0000\u0000\u0000\u018e\u01b5\u0003^/\u0000\u018f"+ + "\u01b5\u0003`0\u0000\u0190\u01b5\u0003Z-\u0000\u0191\u01b5\u0003@ \u0000"+ + "\u0192\u01b5\u0003b1\u0000\u0193\u0194\u0005A\u0000\u0000\u0194\u0199"+ + "\u0003\\.\u0000\u0195\u0196\u0005\"\u0000\u0000\u0196\u0198\u0003\\.\u0000"+ + "\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0001\u0000\u0000\u0000"+ + "\u0199\u0197\u0001\u0000\u0000\u0000\u0199\u019a\u0001\u0000\u0000\u0000"+ + "\u019a\u019c\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000"+ + "\u019c\u019d\u0005B\u0000\u0000\u019d\u01b5\u0001\u0000\u0000\u0000\u019e"+ + 
"\u019f\u0005A\u0000\u0000\u019f\u01a4\u0003Z-\u0000\u01a0\u01a1\u0005"+ + "\"\u0000\u0000\u01a1\u01a3\u0003Z-\u0000\u01a2\u01a0\u0001\u0000\u0000"+ + "\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000"+ + "\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a7\u0001\u0000\u0000"+ + "\u0000\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005B\u0000\u0000"+ + "\u01a8\u01b5\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005A\u0000\u0000\u01aa"+ + "\u01af\u0003b1\u0000\u01ab\u01ac\u0005\"\u0000\u0000\u01ac\u01ae\u0003"+ + "b1\u0000\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ae\u01b1\u0001\u0000\u0000"+ + "\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0\u0001\u0000\u0000"+ + "\u0000\u01b0\u01b2\u0001\u0000\u0000\u0000\u01b1\u01af\u0001\u0000\u0000"+ + "\u0000\u01b2\u01b3\u0005B\u0000\u0000\u01b3\u01b5\u0001\u0000\u0000\u0000"+ + "\u01b4\u018a\u0001\u0000\u0000\u0000\u01b4\u018b\u0001\u0000\u0000\u0000"+ + "\u01b4\u018e\u0001\u0000\u0000\u0000\u01b4\u018f\u0001\u0000\u0000\u0000"+ + "\u01b4\u0190\u0001\u0000\u0000\u0000\u01b4\u0191\u0001\u0000\u0000\u0000"+ + "\u01b4\u0192\u0001\u0000\u0000\u0000\u01b4\u0193\u0001\u0000\u0000\u0000"+ + "\u01b4\u019e\u0001\u0000\u0000\u0000\u01b4\u01a9\u0001\u0000\u0000\u0000"+ + "\u01b5?\u0001\u0000\u0000\u0000\u01b6\u01b9\u00050\u0000\u0000\u01b7\u01b9"+ + "\u0005@\u0000\u0000\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b8\u01b7\u0001"+ + "\u0000\u0000\u0000\u01b9A\u0001\u0000\u0000\u0000\u01ba\u01bb\u0005\t"+ + "\u0000\u0000\u01bb\u01bc\u0005\u001b\u0000\u0000\u01bcC\u0001\u0000\u0000"+ + "\u0000\u01bd\u01be\u0005\u000e\u0000\u0000\u01be\u01c3\u0003F#\u0000\u01bf"+ + "\u01c0\u0005\"\u0000\u0000\u01c0\u01c2\u0003F#\u0000\u01c1\u01bf\u0001"+ + "\u0000\u0000\u0000\u01c2\u01c5\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001"+ + "\u0000\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4E\u0001\u0000"+ + "\u0000\u0000\u01c5\u01c3\u0001\u0000\u0000\u0000\u01c6\u01c8\u0003\n\u0005"+ + 
"\u0000\u01c7\u01c9\u0007\u0004\u0000\u0000\u01c8\u01c7\u0001\u0000\u0000"+ + "\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001\u0000\u0000"+ + "\u0000\u01ca\u01cb\u0005.\u0000\u0000\u01cb\u01cd\u0007\u0005\u0000\u0000"+ + "\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000"+ + "\u01cdG\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005\b\u0000\u0000\u01cf"+ + "\u01d0\u00038\u001c\u0000\u01d0I\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005"+ + "\u0002\u0000\u0000\u01d2\u01d3\u00038\u001c\u0000\u01d3K\u0001\u0000\u0000"+ + "\u0000\u01d4\u01d5\u0005\u000b\u0000\u0000\u01d5\u01da\u0003N\'\u0000"+ + "\u01d6\u01d7\u0005\"\u0000\u0000\u01d7\u01d9\u0003N\'\u0000\u01d8\u01d6"+ + "\u0001\u0000\u0000\u0000\u01d9\u01dc\u0001\u0000\u0000\u0000\u01da\u01d8"+ + "\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01dbM\u0001"+ + "\u0000\u0000\u0000\u01dc\u01da\u0001\u0000\u0000\u0000\u01dd\u01de\u0003"+ + "6\u001b\u0000\u01de\u01df\u0005T\u0000\u0000\u01df\u01e0\u00036\u001b"+ + "\u0000\u01e0O\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u0001\u0000\u0000"+ + "\u01e2\u01e3\u0003\u0014\n\u0000\u01e3\u01e5\u0003b1\u0000\u01e4\u01e6"+ + "\u0003V+\u0000\u01e5\u01e4\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001\u0000"+ + "\u0000\u0000\u01e6Q\u0001\u0000\u0000\u0000\u01e7\u01e8\u0005\u0007\u0000"+ + "\u0000\u01e8\u01e9\u0003\u0014\n\u0000\u01e9\u01ea\u0003b1\u0000\u01ea"+ + "S\u0001\u0000\u0000\u0000\u01eb\u01ec\u0005\n\u0000\u0000\u01ec\u01ed"+ + "\u00034\u001a\u0000\u01edU\u0001\u0000\u0000\u0000\u01ee\u01f3\u0003X"+ + ",\u0000\u01ef\u01f0\u0005\"\u0000\u0000\u01f0\u01f2\u0003X,\u0000\u01f1"+ + "\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3"+ + "\u01f1\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000\u01f4"+ + "W\u0001\u0000\u0000\u0000\u01f5\u01f3\u0001\u0000\u0000\u0000\u01f6\u01f7"+ + "\u0003:\u001d\u0000\u01f7\u01f8\u0005 \u0000\u0000\u01f8\u01f9\u0003>"+ + 
"\u001f\u0000\u01f9Y\u0001\u0000\u0000\u0000\u01fa\u01fb\u0007\u0006\u0000"+ + "\u0000\u01fb[\u0001\u0000\u0000\u0000\u01fc\u01ff\u0003^/\u0000\u01fd"+ + "\u01ff\u0003`0\u0000\u01fe\u01fc\u0001\u0000\u0000\u0000\u01fe\u01fd\u0001"+ + "\u0000\u0000\u0000\u01ff]\u0001\u0000\u0000\u0000\u0200\u0202\u0007\u0000"+ + "\u0000\u0000\u0201\u0200\u0001\u0000\u0000\u0000\u0201\u0202\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204\u0005\u001c"+ + "\u0000\u0000\u0204_\u0001\u0000\u0000\u0000\u0205\u0207\u0007\u0000\u0000"+ + "\u0000\u0206\u0205\u0001\u0000\u0000\u0000\u0206\u0207\u0001\u0000\u0000"+ + "\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0005\u001b\u0000"+ + "\u0000\u0209a\u0001\u0000\u0000\u0000\u020a\u020b\u0005\u001a\u0000\u0000"+ + "\u020bc\u0001\u0000\u0000\u0000\u020c\u020d\u0007\u0007\u0000\u0000\u020d"+ + "e\u0001\u0000\u0000\u0000\u020e\u020f\u0005\u0005\u0000\u0000\u020f\u0210"+ + "\u0003h4\u0000\u0210g\u0001\u0000\u0000\u0000\u0211\u0212\u0005A\u0000"+ + "\u0000\u0212\u0213\u0003\u0002\u0001\u0000\u0213\u0214\u0005B\u0000\u0000"+ + "\u0214i\u0001\u0000\u0000\u0000\u0215\u0216\u0005\r\u0000\u0000\u0216"+ + "\u0217\u0005d\u0000\u0000\u0217k\u0001\u0000\u0000\u0000\u0218\u0219\u0005"+ + "\u0003\u0000\u0000\u0219\u021c\u0005Z\u0000\u0000\u021a\u021b\u0005X\u0000"+ + "\u0000\u021b\u021d\u00036\u001b\u0000\u021c\u021a\u0001\u0000\u0000\u0000"+ + "\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u0227\u0001\u0000\u0000\u0000"+ + "\u021e\u021f\u0005Y\u0000\u0000\u021f\u0224\u0003n7\u0000\u0220\u0221"+ + "\u0005\"\u0000\u0000\u0221\u0223\u0003n7\u0000\u0222\u0220\u0001\u0000"+ + "\u0000\u0000\u0223\u0226\u0001\u0000\u0000\u0000\u0224\u0222\u0001\u0000"+ + "\u0000\u0000\u0224\u0225\u0001\u0000\u0000\u0000\u0225\u0228\u0001\u0000"+ + "\u0000\u0000\u0226\u0224\u0001\u0000\u0000\u0000\u0227\u021e\u0001\u0000"+ + "\u0000\u0000\u0227\u0228\u0001\u0000\u0000\u0000\u0228m\u0001\u0000\u0000"+ + 
"\u0000\u0229\u022a\u00036\u001b\u0000\u022a\u022b\u0005 \u0000\u0000\u022b"+ + "\u022d\u0001\u0000\u0000\u0000\u022c\u0229\u0001\u0000\u0000\u0000\u022c"+ + "\u022d\u0001\u0000\u0000\u0000\u022d\u022e\u0001\u0000\u0000\u0000\u022e"+ + "\u022f\u00036\u001b\u0000\u022fo\u0001\u0000\u0000\u0000\u0230\u0231\u0005"+ + "\u0012\u0000\u0000\u0231\u0232\u0003\"\u0011\u0000\u0232\u0233\u0005X"+ + "\u0000\u0000\u0233\u0234\u00038\u001c\u0000\u0234q\u0001\u0000\u0000\u0000"+ + "\u0235\u0236\u0005\u0011\u0000\u0000\u0236\u0239\u0003\u001c\u000e\u0000"+ + "\u0237\u0238\u0005\u001d\u0000\u0000\u0238\u023a\u0003\u001c\u000e\u0000"+ + "\u0239\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ + "\u023as\u0001\u0000\u0000\u00006\u007f\u0088\u009a\u00a6\u00af\u00b7\u00bd"+ + "\u00c5\u00c7\u00cc\u00d3\u00d8\u00e3\u00e9\u00f1\u00f3\u00fe\u0105\u0110"+ + "\u0113\u0121\u0129\u0131\u0135\u013c\u0144\u014c\u0159\u015d\u0161\u0168"+ + "\u016c\u0173\u017b\u0183\u0199\u01a4\u01af\u01b4\u01b8\u01c3\u01c8\u01cc"+ + "\u01da\u01e5\u01f3\u01fe\u0201\u0206\u021c\u0224\u0227\u022c\u0239"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 192b169cc958..1442aaa99a92 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -956,18 +956,6 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitShowInfo(EsqlBaseParser.ShowInfoContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index de98d4333c1d..3a3ef05c7a46 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -566,13 +566,6 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitShowInfo(EsqlBaseParser.ShowInfoContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 4348c641d9f6..5d2d417f30c5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -861,18 +861,6 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitShowInfo(EsqlBaseParser.ShowInfoContext ctx); - /** - * Enter a parse tree produced by the {@code metaFunctions} - * labeled alternative in {@link EsqlBaseParser#metaCommand}. - * @param ctx the parse tree - */ - void enterMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); - /** - * Exit a parse tree produced by the {@code metaFunctions} - * labeled alternative in {@link EsqlBaseParser#metaCommand}. - * @param ctx the parse tree - */ - void exitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#enrichCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index c334526abfe3..51f2e845bcc5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -519,13 +519,6 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitShowInfo(EsqlBaseParser.ShowInfoContext ctx); - /** - * Visit a parse tree produced by the {@code metaFunctions} - * labeled alternative in {@link EsqlBaseParser#metaCommand}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#enrichCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 8dc07e2e1017..d97c1aefd548 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -53,7 +53,6 @@ import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnresolvedRelation; -import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.joni.exception.SyntaxException; @@ -412,11 +411,6 @@ public class LogicalPlanBuilder 
extends ExpressionBuilder { return new ShowInfo(source(ctx)); } - @Override - public LogicalPlan visitMetaFunctions(EsqlBaseParser.MetaFunctionsContext ctx) { - return new MetaFunctions(source(ctx)); - } - @Override public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { return p -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java deleted file mode 100644 index 029cb6164167..000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/meta/MetaFunctions.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.plan.logical.meta; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; -import org.elasticsearch.xpack.esql.plan.logical.LeafPlan; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.function.Function; -import java.util.stream.Collectors; - -import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; -import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; - -public class MetaFunctions extends 
LeafPlan { - - private final List attributes; - - public MetaFunctions(Source source) { - super(source); - - attributes = new ArrayList<>(); - for (var name : List.of("name", "synopsis", "argNames", "argTypes", "argDescriptions", "returnType", "description")) { - attributes.add(new ReferenceAttribute(Source.EMPTY, name, KEYWORD)); - } - for (var name : List.of("optionalArgs", "variadic", "isAggregation")) { - attributes.add(new ReferenceAttribute(Source.EMPTY, name, BOOLEAN)); - } - } - - @Override - public void writeTo(StreamOutput out) { - throw new UnsupportedOperationException("not serialized"); - } - - @Override - public String getWriteableName() { - throw new UnsupportedOperationException("not serialized"); - } - - @Override - public List output() { - return attributes; - } - - public List> values(EsqlFunctionRegistry functionRegistry) { - List> rows = new ArrayList<>(); - for (var def : functionRegistry.listFunctions(null)) { - EsqlFunctionRegistry.FunctionDescription signature = EsqlFunctionRegistry.description(def); - List row = new ArrayList<>(); - row.add(asBytesRefOrNull(signature.name())); - row.add(new BytesRef(signature.fullSignature())); - row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::name)); - row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::type)); - row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::description)); - row.add(withPipes(signature.returnType())); - row.add(signature.description()); - row.add(collect(signature, EsqlFunctionRegistry.ArgSignature::optional)); - row.add(signature.variadic()); - row.add(signature.isAggregation()); - rows.add(row); - } - rows.sort(Comparator.comparing(x -> ((BytesRef) x.get(0)))); - return rows; - } - - private Object collect(EsqlFunctionRegistry.FunctionDescription signature, Function x) { - if (signature.args().size() == 0) { - return null; - } - if (signature.args().size() == 1) { - Object result = x.apply(signature.args().get(0)); - if (result instanceof String[] 
r) { - return withPipes(r); - } - return result; - } - - List args = signature.args(); - List result = signature.args().stream().map(x).collect(Collectors.toList()); - boolean withPipes = result.get(0) instanceof String[]; - if (result.isEmpty() == false) { - List newResult = new ArrayList<>(); - for (int i = 0; i < result.size(); i++) { - if (signature.variadic() && args.get(i).optional()) { - continue; - } - newResult.add(withPipes ? withPipes((String[]) result.get(i)) : result.get(i)); - } - return newResult; - } - return result; - } - - public static String withPipes(String[] items) { - return Arrays.stream(items).collect(Collectors.joining("|")); - } - - private static BytesRef asBytesRefOrNull(String string) { - return Strings.hasText(string) ? new BytesRef(string) : null; - } - - @Override - public String commandName() { - return "META FUNCTIONS"; - } - - @Override - public boolean expressionsResolved() { - return true; - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this); - } - - @Override - public int hashCode() { - return getClass().hashCode(); - } - - @Override - public boolean equals(Object obj) { - return this == obj || obj != null && getClass() == obj.getClass(); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 9613fa1f3fcd..e571be54692c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -32,7 +32,6 @@ import org.elasticsearch.xpack.esql.plan.logical.join.Join; import org.elasticsearch.xpack.esql.plan.logical.join.JoinConfig; import org.elasticsearch.xpack.esql.plan.logical.join.JoinType; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; -import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import 
org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -98,9 +97,6 @@ public class Mapper { } // Commands - if (p instanceof MetaFunctions metaFunctions) { - return new ShowExec(metaFunctions.source(), metaFunctions.output(), metaFunctions.values(functionRegistry)); - } if (p instanceof ShowInfo showInfo) { return new ShowExec(showInfo.source(), showInfo.output(), showInfo.values()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java index c4d890a818ec..4cae2a9c247f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/FeatureMetric.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.OrderBy; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.Row; -import org.elasticsearch.xpack.esql.plan.logical.meta.MetaFunctions; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import java.util.BitSet; @@ -29,11 +28,6 @@ import java.util.Locale; import java.util.function.Predicate; public enum FeatureMetric { - /** - * The order of these enum values is important, do not change it. - * For any new values added to it, they should go at the end of the list. 
- * see {@link org.elasticsearch.xpack.esql.analysis.Verifier#gatherMetrics} - */ DISSECT(Dissect.class::isInstance), EVAL(Eval.class::isInstance), GROK(Grok.class::isInstance), @@ -48,8 +42,7 @@ public enum FeatureMetric { FROM(EsRelation.class::isInstance), DROP(Drop.class::isInstance), KEEP(Keep.class::isInstance), - RENAME(Rename.class::isInstance), - META(MetaFunctions.class::isInstance); + RENAME(Rename.class::isInstance); private Predicate planCheck; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index adf31ca98306..27656c8122e3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -921,7 +921,6 @@ public class StatementParserTests extends AbstractStatementParserTests { public void testMetadataFieldOnOtherSources() { expectError("row a = 1 metadata _index", "line 1:20: extraneous input '_index' expecting "); - expectError("meta functions metadata _index", "line 1:16: token recognition error at: 'm'"); expectError("show info metadata _index", "line 1:11: token recognition error at: 'm'"); expectError( "explain [from foo] metadata _index", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java index ab004a3a055c..203e5c3bd37e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/VerifierMetricsTests.java @@ -24,7 +24,6 @@ import static org.elasticsearch.xpack.esql.stats.FeatureMetric.FROM; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.GROK; import static 
org.elasticsearch.xpack.esql.stats.FeatureMetric.KEEP; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.LIMIT; -import static org.elasticsearch.xpack.esql.stats.FeatureMetric.META; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.MV_EXPAND; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.RENAME; import static org.elasticsearch.xpack.esql.stats.FeatureMetric.ROW; @@ -55,7 +54,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testEvalQuery() { @@ -75,7 +73,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testGrokQuery() { @@ -95,7 +92,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testLimitQuery() { @@ -115,7 +111,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testSortQuery() { @@ -135,7 +130,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testStatsQuery() { @@ -155,7 +149,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testWhereQuery() { @@ -175,7 +168,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testTwoWhereQuery() { @@ -195,7 +187,6 @@ public class VerifierMetricsTests extends ESTestCase { 
assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testTwoQueriesExecuted() { @@ -235,7 +226,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testEnrich() { @@ -261,7 +251,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testMvExpand() { @@ -290,27 +279,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); - } - - public void testMetaFunctions() { - Counters c = esql("meta functions | stats a = count(*) | mv_expand a"); - assertEquals(0, dissect(c)); - assertEquals(0, eval(c)); - assertEquals(0, grok(c)); - assertEquals(0, limit(c)); - assertEquals(0, sort(c)); - assertEquals(1L, stats(c)); - assertEquals(0, where(c)); - assertEquals(0, enrich(c)); - assertEquals(1L, mvExpand(c)); - assertEquals(0, show(c)); - assertEquals(0, row(c)); - assertEquals(0, from(c)); - assertEquals(0, drop(c)); - assertEquals(0, keep(c)); - assertEquals(0, rename(c)); - assertEquals(1L, meta(c)); } public void testShowInfo() { @@ -330,7 +298,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testRow() { @@ -350,7 +317,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(0, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } public void testDropAndRename() { @@ -370,7 +336,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(1L, drop(c)); assertEquals(0, keep(c)); assertEquals(1L, rename(c)); - assertEquals(0, meta(c)); } public void 
testKeep() { @@ -395,7 +360,6 @@ public class VerifierMetricsTests extends ESTestCase { assertEquals(0, drop(c)); assertEquals(1L, keep(c)); assertEquals(0, rename(c)); - assertEquals(0, meta(c)); } private long dissect(Counters c) { @@ -458,10 +422,6 @@ public class VerifierMetricsTests extends ESTestCase { return c.get(FPREFIX + RENAME); } - private long meta(Counters c) { - return c.get(FPREFIX + META); - } - private Counters esql(String esql) { return esql(esql, null); } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 74c0e9ef1bb3..8bbdb27a87d1 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -1,8 +1,13 @@ --- setup: - requires: - cluster_features: ["gte_v8.14.0"] - reason: "Introduction of META tracking in 8.14+" + capabilities: + - method: POST + path: /_query + parameters: [ method, path, parameters, capabilities ] + capabilities: [ no_meta ] + reason: "META command removed which changes the count of the data returned" + test_runner_features: [capabilities] - do: indices.create: @@ -23,7 +28,7 @@ setup: - do: {xpack.usage: {}} - match: { esql.available: true } - match: { esql.enabled: true } - - length: { esql.features: 16 } + - length: { esql.features: 15 } - set: {esql.features.dissect: dissect_counter} - set: {esql.features.drop: drop_counter} - set: {esql.features.eval: eval_counter} @@ -32,7 +37,6 @@ setup: - set: {esql.features.grok: grok_counter} - set: {esql.features.keep: keep_counter} - set: {esql.features.limit: limit_counter} - - set: {esql.features.meta: meta_counter} - set: {esql.features.mv_expand: mv_expand_counter} - set: {esql.features.rename: rename_counter} - set: {esql.features.row: row_counter} From 43e5258b3c3b5e468b6177d1347e20ea98104513 Mon Sep 17 00:00:00 2001 From: Pete Gillin 
Date: Tue, 8 Oct 2024 17:39:53 +0100 Subject: [PATCH 55/85] Add a `terminate` ingest processor (#114157) This processor simply causes any remaining processors in the pipeline to be skipped. It will normally be executed conditionally using the `if` option. (If this pipeline is being called from another pipeline, the calling pipeline is *not* terminated.) For example, this: ``` POST /_ingest/pipeline/_simulate { "pipeline": { "description": "Appends just 'before' to the steps field if the number field is present, or both 'before' and 'after' if not", "processors": [ { "append": { "field": "steps", "value": "before" } }, { "terminate": { "if": "ctx.error != null" } }, { "append": { "field": "steps", "value": "after" } } ] }, "docs": [ { "_index": "index", "_id": "doc1", "_source": { "name": "okay", "steps": [] } }, { "_index": "index", "_id": "doc2", "_source": { "name": "bad", "error": "oh no", "steps": [] } } ] } ``` returns something like this: ``` { "docs": [ { "doc": { "_index": "index", "_version": "-3", "_id": "doc1", "_source": { "name": "okay", "steps": [ "before", "after" ] }, "_ingest": { "timestamp": "2024-10-04T16:25:20.448881Z" } } }, { "doc": { "_index": "index", "_version": "-3", "_id": "doc2", "_source": { "name": "bad", "error": "oh no", "steps": [ "before" ] }, "_ingest": { "timestamp": "2024-10-04T16:25:20.448932Z" } } } ] } ``` --- docs/changelog/114157.yaml | 6 + .../ingest/processors/terminate.asciidoc | 30 ++++ .../ingest/common/IngestCommonPlugin.java | 1 + .../ingest/common/TerminateProcessor.java | 53 +++++++ .../common/TerminateProcessorTests.java | 70 +++++++++ .../test/ingest/330_terminate_processor.yml | 138 ++++++++++++++++++ .../ingest/CompoundProcessor.java | 7 +- .../elasticsearch/ingest/IngestDocument.java | 22 +++ .../org/elasticsearch/ingest/Pipeline.java | 3 + 9 files changed, 327 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/114157.yaml create mode 100644 docs/reference/ingest/processors/terminate.asciidoc 
create mode 100644 modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TerminateProcessor.java create mode 100644 modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java create mode 100644 modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/330_terminate_processor.yml diff --git a/docs/changelog/114157.yaml b/docs/changelog/114157.yaml new file mode 100644 index 000000000000..22e0fda173e9 --- /dev/null +++ b/docs/changelog/114157.yaml @@ -0,0 +1,6 @@ +pr: 114157 +summary: Add a `terminate` ingest processor +area: Ingest Node +type: feature +issues: + - 110218 diff --git a/docs/reference/ingest/processors/terminate.asciidoc b/docs/reference/ingest/processors/terminate.asciidoc new file mode 100644 index 000000000000..a2643fbd955f --- /dev/null +++ b/docs/reference/ingest/processors/terminate.asciidoc @@ -0,0 +1,30 @@ +[[terminate-processor]] +=== Terminate processor + +++++ +Terminate +++++ + +Terminates the current ingest pipeline, causing no further processors to be run. +This will normally be executed conditionally, using the `if` option. + +If this pipeline is being called from another pipeline, the calling pipeline is *not* terminated. 
+ +[[terminate-options]] +.Terminate Options +[options="header"] +|====== +| Name | Required | Default | Description +include::common-options.asciidoc[] +|====== + +[source,js] +-------------------------------------------------- +{ + "description" : "terminates the current pipeline if the error field is present", + "terminate": { + "if": "ctx.error != null" + } +} +-------------------------------------------------- +// NOTCONSOLE diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index 6ef636847e2d..d585c6217202 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -72,6 +72,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl entry(SetProcessor.TYPE, new SetProcessor.Factory(parameters.scriptService)), entry(SortProcessor.TYPE, new SortProcessor.Factory()), entry(SplitProcessor.TYPE, new SplitProcessor.Factory()), + entry(TerminateProcessor.TYPE, new TerminateProcessor.Factory()), entry(TrimProcessor.TYPE, new TrimProcessor.Factory()), entry(URLDecodeProcessor.TYPE, new URLDecodeProcessor.Factory()), entry(UppercaseProcessor.TYPE, new UppercaseProcessor.Factory()), diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TerminateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TerminateProcessor.java new file mode 100644 index 000000000000..5b6144ba8eab --- /dev/null +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TerminateProcessor.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; + +import java.util.Map; + +/** + * A {@link Processor} which simply prevents subsequent processors in the pipeline from running (without failing, like {@link FailProcessor} + * does). This will normally be run conditionally, using the {@code if} option. + */ +public class TerminateProcessor extends AbstractProcessor { + + static final String TYPE = "terminate"; + + TerminateProcessor(String tag, String description) { + super(tag, description); + } + + @Override + public IngestDocument execute(IngestDocument ingestDocument) { + ingestDocument.terminate(); + return ingestDocument; + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + + @Override + public Processor create( + Map processorFactories, + String tag, + String description, + Map config + ) { + return new TerminateProcessor(tag, description); + } + } +} diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java new file mode 100644 index 000000000000..1888f8366edd --- /dev/null +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TerminateProcessorTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.ingest.common; + +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.ingest.ValueSource; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +import static org.elasticsearch.ingest.RandomDocumentPicks.randomIngestDocument; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class TerminateProcessorTests extends ESTestCase { + + public void testTerminateInPipeline() throws Exception { + Pipeline pipeline = new Pipeline( + "my-pipeline", + null, + null, + null, + new CompoundProcessor( + new SetProcessor( + "before-set", + "Sets before field to true", + new TestTemplateService.MockTemplateScript.Factory("before"), + ValueSource.wrap(true, TestTemplateService.instance()), + null + ), + new TerminateProcessor("terminate", "terminates the pipeline"), + new SetProcessor( + "after-set", + "Sets after field to true", + new TestTemplateService.MockTemplateScript.Factory("after"), + ValueSource.wrap(true, TestTemplateService.instance()), + null + ) + ) + ); + IngestDocument input = randomIngestDocument(random(), Map.of("foo", "bar")); + PipelineOutput output = new PipelineOutput(); + + pipeline.execute(input, output::set); + + assertThat(output.exception, nullValue()); + // We expect the before-set processor to have run, but not the after-set one: + assertThat(output.document.getSource(), is(Map.of("foo", "bar", "before", true))); + } + + private static class PipelineOutput 
{ + IngestDocument document; + Exception exception; + + void set(IngestDocument document, Exception exception) { + this.document = document; + this.exception = exception; + } + } +} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/330_terminate_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/330_terminate_processor.yml new file mode 100644 index 000000000000..7a46d7bb272d --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/330_terminate_processor.yml @@ -0,0 +1,138 @@ +--- +setup: + - do: + ingest.put_pipeline: + id: "test-pipeline" + body: > + { + "description": "Appends just 'before' to the steps field if the number field is less than 50, or both 'before' and 'after' if not", + "processors": [ + { + "append": { + "field": "steps", + "value": "before" + } + }, + { + "terminate": { + "if": "ctx.number < 50" + } + }, + { + "append": { + "field": "steps", + "value": "after" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "test-final-pipeline" + body: > + { + "description": "Appends 'final' to the steps field", + "processors": [ + { + "append": { + "field": "steps", + "value": "final" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "test-outer-pipeline" + body: > + { + "description": "Runs test-pipeline and then append 'outer' to the steps field", + "processors": [ + { + "pipeline": { + "name": "test-pipeline" + } + }, + { + "append": { + "field": "steps", + "value": "outer" + } + } + ] + } + - do: + indices.create: + index: "test-index-with-default-and-final-pipelines" + body: + settings: + index: + default_pipeline: "test-pipeline" + final_pipeline: "test-final-pipeline" + - do: + indices.create: + index: "test-vanilla-index" + +--- +teardown: + - do: + indices.delete: + index: "test-index-with-default-and-final-pipelines" + ignore_unavailable: true + - do: + indices.delete: + index: "test-vanilla-index" + ignore_unavailable: true + 
- do: + ingest.delete_pipeline: + id: "test-pipeline" + ignore: 404 + - do: + ingest.delete_pipeline: + id: "test-outer-pipeline" + ignore: 404 + +--- +"Test pipeline including conditional terminate pipeline": + + - do: + bulk: + refresh: true + body: + - '{ "index": {"_index": "test-index-with-default-and-final-pipelines" } }' + - '{ "comment": "should terminate", "number": 40, "steps": [] }' + - '{ "index": {"_index": "test-index-with-default-and-final-pipelines" } }' + - '{ "comment": "should continue to end", "number": 60, "steps": [] }' + + - do: + search: + rest_total_hits_as_int: true + index: "test-index-with-default-and-final-pipelines" + body: + sort: "number" + - match: { hits.total: 2 } + - match: { hits.hits.0._source.number: 40 } + - match: { hits.hits.1._source.number: 60 } + - match: { hits.hits.0._source.steps: ["before", "final"] } + - match: { hits.hits.1._source.steps: ["before", "after", "final"] } + +--- +"Test pipeline with terminate invoked from an outer pipeline": + + - do: + bulk: + refresh: true + pipeline: "test-outer-pipeline" + body: + - '{ "index": {"_index": "test-vanilla-index" } }' + - '{ "comment": "should terminate inner pipeline but not outer", "number": 40, "steps": [] }' + + - do: + search: + rest_total_hits_as_int: true + index: "test-vanilla-index" + body: + sort: "number" + - match: { hits.total: 1 } + - match: { hits.hits.0._source.number: 40 } + - match: { hits.hits.0._source.steps: ["before", "outer"] } diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 9620becd49d5..873f334d0a65 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -148,7 +148,7 @@ public class CompoundProcessor implements Processor { void innerExecute(int currentProcessor, IngestDocument ingestDocument, final BiConsumer handler) { assert 
currentProcessor <= processorsWithMetrics.size(); - if (currentProcessor == processorsWithMetrics.size() || ingestDocument.isReroute()) { + if (currentProcessor == processorsWithMetrics.size() || ingestDocument.isReroute() || ingestDocument.isTerminate()) { handler.accept(ingestDocument, null); return; } @@ -159,7 +159,8 @@ public class CompoundProcessor implements Processor { // iteratively execute any sync processors while (currentProcessor < processorsWithMetrics.size() && processorsWithMetrics.get(currentProcessor).v1().isAsync() == false - && ingestDocument.isReroute() == false) { + && ingestDocument.isReroute() == false + && ingestDocument.isTerminate() == false) { processorWithMetric = processorsWithMetrics.get(currentProcessor); processor = processorWithMetric.v1(); metric = processorWithMetric.v2(); @@ -185,7 +186,7 @@ public class CompoundProcessor implements Processor { } assert currentProcessor <= processorsWithMetrics.size(); - if (currentProcessor == processorsWithMetrics.size() || ingestDocument.isReroute()) { + if (currentProcessor == processorsWithMetrics.size() || ingestDocument.isReroute() || ingestDocument.isTerminate()) { handler.accept(ingestDocument, null); return; } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 0bc1c0d2932d..280c7684a855 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -82,6 +82,7 @@ public final class IngestDocument { private boolean doNoSelfReferencesCheck = false; private boolean reroute = false; + private boolean terminate = false; public IngestDocument(String index, String id, long version, String routing, VersionType versionType, Map source) { this.ctxMap = new IngestCtxMap(index, id, version, routing, versionType, ZonedDateTime.now(ZoneOffset.UTC), source); @@ -935,6 +936,27 @@ public final class 
IngestDocument { reroute = false; } + /** + * Sets the terminate flag to true, to indicate that no further processors in the current pipeline should be run for this document. + */ + public void terminate() { + terminate = true; + } + + /** + * Returns whether the {@link #terminate()} flag was set. + */ + boolean isTerminate() { + return terminate; + } + + /** + * Resets the {@link #terminate()} flag. + */ + void resetTerminate() { + terminate = false; + } + public enum Metadata { INDEX(IndexFieldMapper.NAME), TYPE("_type"), diff --git a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java index 6153d45bce77..a8e8fbb5d321 100644 --- a/server/src/main/java/org/elasticsearch/ingest/Pipeline.java +++ b/server/src/main/java/org/elasticsearch/ingest/Pipeline.java @@ -133,6 +133,9 @@ public final class Pipeline { if (e != null) { metrics.ingestFailed(); } + // Reset the terminate status now that pipeline execution is complete (if this was executed as part of another pipeline, the + // outer pipeline should continue): + ingestDocument.resetTerminate(); handler.accept(result, e); }); } From 0ed595988987e44cbaaef5564bb708a0e1d07fdc Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 8 Oct 2024 09:53:49 -0700 Subject: [PATCH 56/85] Collect query metrics on search nodes (#114267) When I added the query/fetch metrics, I overlooked that non-primary shards were being skipped during metrics collection, and the stateful tests didn't catch it. This change ensures that search metrics are now collected from every shard copy. 
--- .../monitor/metrics/IndicesMetricsIT.java | 177 ++++++++---------- .../monitor/metrics/IndicesMetrics.java | 69 ++++--- 2 files changed, 121 insertions(+), 125 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java index 4a060eadc735..fb563ee333d0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/IndicesMetricsIT.java @@ -107,14 +107,14 @@ public class IndicesMetricsIT extends ESIntegTestCase { static final String LOGSDB_INDEXING_TIME = "es.indices.logsdb.indexing.time"; static final String LOGSDB_INDEXING_FAILURE = "es.indices.logsdb.indexing.failure.total"; - public void testIndicesMetrics() throws Exception { - String node = internalCluster().startNode(); + public void testIndicesMetrics() { + String indexNode = internalCluster().startNode(); ensureStableCluster(1); - final TestTelemetryPlugin telemetry = internalCluster().getInstance(PluginsService.class, node) + TestTelemetryPlugin telemetry = internalCluster().getInstance(PluginsService.class, indexNode) .filterPlugins(TestTelemetryPlugin.class) .findFirst() .orElseThrow(); - final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, indexNode); var indexing0 = indicesService.stats(CommonStatsFlags.ALL, false).getIndexing().getTotal(); telemetry.resetMeter(); long numStandardIndices = randomIntBetween(1, 5); @@ -131,19 +131,12 @@ public class IndicesMetricsIT extends ESIntegTestCase { STANDARD_BYTES_SIZE, greaterThan(0L), - TIME_SERIES_INDEX_COUNT, - equalTo(0L), - TIME_SERIES_DOCS_COUNT, - equalTo(0L), - TIME_SERIES_BYTES_SIZE, - equalTo(0L), - - LOGSDB_INDEX_COUNT, - equalTo(0L), - 
LOGSDB_DOCS_COUNT, - equalTo(0L), - LOGSDB_BYTES_SIZE, - equalTo(0L) + STANDARD_INDEXING_COUNT, + equalTo(numStandardDocs), + STANDARD_INDEXING_TIME, + greaterThanOrEqualTo(0L), + STANDARD_INDEXING_FAILURE, + equalTo(indexing1.getIndexFailedCount() - indexing0.getIndexCount()) ) ); @@ -154,13 +147,6 @@ public class IndicesMetricsIT extends ESIntegTestCase { telemetry, 2, Map.of( - STANDARD_INDEX_COUNT, - equalTo(numStandardIndices), - STANDARD_DOCS_COUNT, - equalTo(numStandardDocs), - STANDARD_BYTES_SIZE, - greaterThan(0L), - TIME_SERIES_INDEX_COUNT, equalTo(numTimeSeriesIndices), TIME_SERIES_DOCS_COUNT, @@ -168,12 +154,12 @@ public class IndicesMetricsIT extends ESIntegTestCase { TIME_SERIES_BYTES_SIZE, greaterThan(20L), - LOGSDB_INDEX_COUNT, - equalTo(0L), - LOGSDB_DOCS_COUNT, - equalTo(0L), - LOGSDB_BYTES_SIZE, - equalTo(0L) + TIME_SERIES_INDEXING_COUNT, + equalTo(numTimeSeriesDocs), + TIME_SERIES_INDEXING_TIME, + greaterThanOrEqualTo(0L), + TIME_SERIES_INDEXING_FAILURE, + equalTo(indexing2.getIndexFailedCount() - indexing1.getIndexFailedCount()) ) ); @@ -184,47 +170,12 @@ public class IndicesMetricsIT extends ESIntegTestCase { telemetry, 3, Map.of( - STANDARD_INDEX_COUNT, - equalTo(numStandardIndices), - STANDARD_DOCS_COUNT, - equalTo(numStandardDocs), - STANDARD_BYTES_SIZE, - greaterThan(0L), - - TIME_SERIES_INDEX_COUNT, - equalTo(numTimeSeriesIndices), - TIME_SERIES_DOCS_COUNT, - equalTo(numTimeSeriesDocs), - TIME_SERIES_BYTES_SIZE, - greaterThan(20L), - LOGSDB_INDEX_COUNT, equalTo(numLogsdbIndices), LOGSDB_DOCS_COUNT, equalTo(numLogsdbDocs), LOGSDB_BYTES_SIZE, - greaterThan(0L) - ) - ); - // indexing stats - collectThenAssertMetrics( - telemetry, - 4, - Map.of( - STANDARD_INDEXING_COUNT, - equalTo(numStandardDocs), - STANDARD_INDEXING_TIME, - greaterThanOrEqualTo(0L), - STANDARD_INDEXING_FAILURE, - equalTo(indexing1.getIndexFailedCount() - indexing0.getIndexCount()), - - TIME_SERIES_INDEXING_COUNT, - equalTo(numTimeSeriesDocs), - TIME_SERIES_INDEXING_TIME, - 
greaterThanOrEqualTo(0L), - TIME_SERIES_INDEXING_FAILURE, - equalTo(indexing2.getIndexFailedCount() - indexing1.getIndexFailedCount()), - + greaterThan(0L), LOGSDB_INDEXING_COUNT, equalTo(numLogsdbDocs), LOGSDB_INDEXING_TIME, @@ -233,11 +184,44 @@ public class IndicesMetricsIT extends ESIntegTestCase { equalTo(indexing3.getIndexFailedCount() - indexing2.getIndexFailedCount()) ) ); - telemetry.resetMeter(); + // already collected indexing stats + collectThenAssertMetrics( + telemetry, + 4, + Map.of( + STANDARD_INDEXING_COUNT, + equalTo(0L), + STANDARD_INDEXING_TIME, + equalTo(0L), + STANDARD_INDEXING_FAILURE, + equalTo(0L), + TIME_SERIES_INDEXING_COUNT, + equalTo(0L), + TIME_SERIES_INDEXING_TIME, + equalTo(0L), + TIME_SERIES_INDEXING_FAILURE, + equalTo(0L), + + LOGSDB_INDEXING_COUNT, + equalTo(0L), + LOGSDB_INDEXING_TIME, + equalTo(0L), + LOGSDB_INDEXING_FAILURE, + equalTo(0L) + ) + ); + String searchNode = internalCluster().startDataOnlyNode(); + indicesService = internalCluster().getInstance(IndicesService.class, searchNode); + telemetry = internalCluster().getInstance(PluginsService.class, searchNode) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + ensureGreen("st*", "log*", "time*"); // search and fetch - client().prepareSearch("standard*").setSize(100).get().decRef(); - var nodeStats1 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); + String preference = "_only_local"; + client(searchNode).prepareSearch("standard*").setPreference(preference).setSize(100).get().decRef(); + var search1 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); collectThenAssertMetrics( telemetry, 1, @@ -245,11 +229,11 @@ public class IndicesMetricsIT extends ESIntegTestCase { STANDARD_QUERY_COUNT, equalTo(numStandardIndices), STANDARD_QUERY_TIME, - equalTo(nodeStats1.getQueryTimeInMillis()), + equalTo(search1.getQueryTimeInMillis()), STANDARD_FETCH_COUNT, - equalTo(nodeStats1.getFetchCount()), + 
equalTo(search1.getFetchCount()), STANDARD_FETCH_TIME, - equalTo(nodeStats1.getFetchTimeInMillis()), + equalTo(search1.getFetchTimeInMillis()), TIME_SERIES_QUERY_COUNT, equalTo(0L), @@ -263,25 +247,25 @@ public class IndicesMetricsIT extends ESIntegTestCase { ) ); - client().prepareSearch("time*").setSize(100).get().decRef(); - var nodeStats2 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); + client(searchNode).prepareSearch("time*").setPreference(preference).setSize(100).get().decRef(); + var search2 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); collectThenAssertMetrics( telemetry, 2, Map.of( STANDARD_QUERY_COUNT, - equalTo(numStandardIndices), + equalTo(0L), STANDARD_QUERY_TIME, - equalTo(nodeStats1.getQueryTimeInMillis()), + equalTo(0L), TIME_SERIES_QUERY_COUNT, equalTo(numTimeSeriesIndices), TIME_SERIES_QUERY_TIME, - equalTo(nodeStats2.getQueryTimeInMillis() - nodeStats1.getQueryTimeInMillis()), + equalTo(search2.getQueryTimeInMillis() - search1.getQueryTimeInMillis()), TIME_SERIES_FETCH_COUNT, - equalTo(nodeStats2.getFetchCount() - nodeStats1.getFetchCount()), + equalTo(search2.getFetchCount() - search1.getFetchCount()), TIME_SERIES_FETCH_TIME, - equalTo(nodeStats2.getFetchTimeInMillis() - nodeStats1.getFetchTimeInMillis()), + equalTo(search2.getFetchTimeInMillis() - search1.getFetchTimeInMillis()), LOGSDB_QUERY_COUNT, equalTo(0L), @@ -289,41 +273,44 @@ public class IndicesMetricsIT extends ESIntegTestCase { equalTo(0L) ) ); - client().prepareSearch("logs*").setSize(100).get().decRef(); - var nodeStats3 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); + client(searchNode).prepareSearch("logs*").setPreference(preference).setSize(100).get().decRef(); + var search3 = indicesService.stats(CommonStatsFlags.ALL, false).getSearch().getTotal(); collectThenAssertMetrics( telemetry, 3, Map.of( STANDARD_QUERY_COUNT, - equalTo(numStandardIndices), + equalTo(0L), STANDARD_QUERY_TIME, - 
equalTo(nodeStats1.getQueryTimeInMillis()), + equalTo(0L), TIME_SERIES_QUERY_COUNT, - equalTo(numTimeSeriesIndices), + equalTo(0L), TIME_SERIES_QUERY_TIME, - equalTo(nodeStats2.getQueryTimeInMillis() - nodeStats1.getQueryTimeInMillis()), + equalTo(0L), LOGSDB_QUERY_COUNT, equalTo(numLogsdbIndices), LOGSDB_QUERY_TIME, - equalTo(nodeStats3.getQueryTimeInMillis() - nodeStats2.getQueryTimeInMillis()), + equalTo(search3.getQueryTimeInMillis() - search2.getQueryTimeInMillis()), LOGSDB_FETCH_COUNT, - equalTo(nodeStats3.getFetchCount() - nodeStats2.getFetchCount()), + equalTo(search3.getFetchCount() - search2.getFetchCount()), LOGSDB_FETCH_TIME, - equalTo(nodeStats3.getFetchTimeInMillis() - nodeStats2.getFetchTimeInMillis()) + equalTo(search3.getFetchTimeInMillis() - search2.getFetchTimeInMillis()) ) ); // search failures - expectThrows(Exception.class, () -> { client().prepareSearch("logs*").setRuntimeMappings(parseMapping(""" - { - "fail_me": { - "type": "long", - "script": {"source": "<>", "lang": "failing_field"} + expectThrows( + Exception.class, + () -> { client(searchNode).prepareSearch("logs*").setPreference(preference).setRuntimeMappings(parseMapping(""" + { + "fail_me": { + "type": "long", + "script": {"source": "<>", "lang": "failing_field"} + } } - } - """)).setQuery(new RangeQueryBuilder("fail_me").gte(0)).setAllowPartialSearchResults(true).get(); }); + """)).setQuery(new RangeQueryBuilder("fail_me").gte(0)).setAllowPartialSearchResults(true).get(); } + ); collectThenAssertMetrics( telemetry, 4, diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java index e07f6908330d..11df8710fad6 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/IndicesMetrics.java @@ -19,6 +19,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; 
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexingStats; @@ -31,6 +32,8 @@ import java.util.ArrayList; import java.util.EnumMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; /** * {@link IndicesMetrics} monitors index statistics on an Elasticsearch node and exposes them as metrics @@ -84,75 +87,75 @@ public class IndicesMetrics extends AbstractLifecycleComponent { metrics.add( registry.registerLongGauge( "es.indices." + name + ".query.total", - "total queries of " + name + " indices", + "current queries of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryCount()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getQueryCount()) ) ); metrics.add( registry.registerLongGauge( "es.indices." + name + ".query.time", - "total query time of " + name + " indices", + "current query time of " + name + " indices", "ms", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryTimeInMillis()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getQueryTimeInMillis()) ) ); metrics.add( registry.registerLongGauge( "es.indices." + name + ".query.failure.total", - "total query failures of " + name + " indices", + "current query failures of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getQueryFailure()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getQueryFailure()) ) ); // fetch (count, took, failures) - use gauges as shards can be removed metrics.add( registry.registerLongGauge( "es.indices." 
+ name + ".fetch.total", - "total fetches of " + name + " indices", + "current fetches of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchCount()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getFetchCount()) ) ); metrics.add( registry.registerLongGauge( "es.indices." + name + ".fetch.time", - "total fetch time of " + name + " indices", + "current fetch time of " + name + " indices", "ms", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchTimeInMillis()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getFetchTimeInMillis()) ) ); metrics.add( registry.registerLongGauge( "es.indices." + name + ".fetch.failure.total", - "total fetch failures of " + name + " indices", + "current fetch failures of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).search.getFetchFailure()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).search.getFetchFailure()) ) ); // indexing metrics.add( registry.registerLongGauge( "es.indices." + name + ".indexing.total", - "total indexing operations of " + name + " indices", + "current indexing operations of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexCount()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).indexing.getIndexCount()) ) ); metrics.add( registry.registerLongGauge( "es.indices." + name + ".indexing.time", - "total indexing time of " + name + " indices", + "current indexing time of " + name + " indices", "ms", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexTime().millis()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).indexing.getIndexTime().millis()) ) ); metrics.add( registry.registerLongGauge( "es.indices." 
+ name + ".indexing.failure.total", - "total indexing failures of " + name + " indices", + "current indexing failures of " + name + " indices", "unit", - () -> new LongWithAttributes(cache.getOrRefresh().get(indexMode).indexing.getIndexFailedCount()) + diffGauge(() -> cache.getOrRefresh().get(indexMode).indexing.getIndexFailedCount()) ) ); } @@ -160,6 +163,15 @@ public class IndicesMetrics extends AbstractLifecycleComponent { return metrics; } + static Supplier diffGauge(Supplier currentValue) { + final AtomicLong counter = new AtomicLong(); + return () -> { + var curr = currentValue.get(); + long prev = counter.getAndUpdate(v -> Math.max(curr, v)); + return new LongWithAttributes(Math.max(0, curr - prev)); + }; + } + @Override protected void doStart() { metrics.addAll(registerAsyncMetrics(registry, stateCache)); @@ -218,22 +230,19 @@ public class IndicesMetrics extends AbstractLifecycleComponent { continue; // skip system indices } final ShardRouting shardRouting = indexShard.routingEntry(); - if (shardRouting.primary() == false) { - continue; // count primaries only - } - if (shardRouting.recoverySource() != null) { - continue; // exclude relocating shards - } final IndexMode indexMode = indexShard.indexSettings().getMode(); final IndexStats indexStats = stats.get(indexMode); - if (shardRouting.shardId().id() == 0) { - indexStats.numIndices++; - } try { - indexStats.numDocs += indexShard.commitStats().getNumDocs(); - indexStats.numBytes += indexShard.storeStats().sizeInBytes(); + if (shardRouting.primary() && shardRouting.recoverySource() == null) { + if (shardRouting.shardId().id() == 0) { + indexStats.numIndices++; + } + final DocsStats docStats = indexShard.docStats(); + indexStats.numDocs += docStats.getCount(); + indexStats.numBytes += docStats.getTotalSizeInBytes(); + indexStats.indexing.add(indexShard.indexingStats().getTotal()); + } indexStats.search.add(indexShard.searchStats().getTotal()); - 
indexStats.indexing.add(indexShard.indexingStats().getTotal()); } catch (IllegalIndexShardStateException | AlreadyClosedException ignored) { // ignored } From 5b1b889b97912d9c7c84b5433b26af4bfd5a797f Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 8 Oct 2024 19:54:19 +0200 Subject: [PATCH 57/85] Authenticate to elastic docker registry for resolving wolfi image (#114347) We need to resolve the latest wolfi image from our docker registry --- .buildkite/pipelines/dra-workflow.yml | 2 ++ .buildkite/pipelines/periodic-packaging.template.yml | 3 ++- .buildkite/pipelines/periodic-packaging.yml | 3 ++- .../pipelines/pull-request/packaging-tests-unix-sample.yml | 1 + 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipelines/dra-workflow.yml b/.buildkite/pipelines/dra-workflow.yml index 25477c8541fa..36828a6512db 100644 --- a/.buildkite/pipelines/dra-workflow.yml +++ b/.buildkite/pipelines/dra-workflow.yml @@ -2,6 +2,7 @@ steps: - command: .buildkite/scripts/dra-workflow.sh env: USE_DRA_CREDENTIALS: "true" + USE_PROD_DOCKER_CREDENTIALS: "true" agents: provider: gcp image: family/elasticsearch-ubuntu-2204 @@ -18,4 +19,5 @@ steps: branch: "${BUILDKITE_BRANCH}" env: DRA_WORKFLOW: staging + USE_PROD_DOCKER_CREDENTIALS: "true" if: build.env('DRA_WORKFLOW') == 'staging' diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index e0da1f46486e..dfedfac9d5b0 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -27,7 +27,8 @@ steps: image: family/elasticsearch-{{matrix.image}} diskSizeGb: 350 machineType: n1-standard-8 - env: {} + env: + USE_PROD_DOCKER_CREDENTIALS: "true" - group: packaging-tests-upgrade steps: $BWC_STEPS - group: packaging-tests-windows diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 5a05d75cf95a..b29747c60617 100644 --- 
a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -28,7 +28,8 @@ steps: image: family/elasticsearch-{{matrix.image}} diskSizeGb: 350 machineType: n1-standard-8 - env: {} + env: + USE_PROD_DOCKER_CREDENTIALS: "true" - group: packaging-tests-upgrade steps: - label: "{{matrix.image}} / 8.0.1 / packaging-tests-upgrade" diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml index 98bc61ea3373..97558381d0a0 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix-sample.yml @@ -24,4 +24,5 @@ steps: diskSizeGb: 350 machineType: custom-16-32768 env: + USE_PROD_DOCKER_CREDENTIALS: "true" PACKAGING_TASK: "{{matrix.PACKAGING_TASK}}" From ebe3c0f10d6e5180f2aa9593734e9311d8ce4c48 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Oct 2024 14:04:36 -0400 Subject: [PATCH 58/85] ESQL: Document MV_SLICE limitations (#114162) `MV_SLICE` is useful, but loading values from lucene frequently sorts them so `MV_SLICE` is not as useful as you think it is. It's mostly for after, say, a `SPLIT`. This documents that and adds a link to the section on multivalues. It also moves similar docs to a separate paragraph in the docs for easier reading. 
--- .../esql/functions/description/mv_first.asciidoc | 8 +++++++- .../esql/functions/description/mv_last.asciidoc | 8 +++++++- .../esql/functions/description/mv_slice.asciidoc | 6 +++++- .../esql/functions/kibana/definition/mv_first.json | 2 +- .../esql/functions/kibana/definition/mv_last.json | 2 +- .../esql/functions/kibana/definition/mv_slice.json | 2 +- docs/reference/esql/functions/kibana/docs/mv_first.md | 6 ------ docs/reference/esql/functions/kibana/docs/mv_last.md | 6 ------ docs/reference/esql/functions/kibana/docs/mv_slice.md | 2 ++ .../xpack/esql/expression/function/FunctionInfo.java | 11 ++++++++--- .../function/scalar/multivalue/MvFirst.java | 4 ++-- .../expression/function/scalar/multivalue/MvLast.java | 4 ++-- .../function/scalar/multivalue/MvSlice.java | 9 ++++++++- 13 files changed, 44 insertions(+), 26 deletions(-) diff --git a/docs/reference/esql/functions/description/mv_first.asciidoc b/docs/reference/esql/functions/description/mv_first.asciidoc index 99223e2c02d9..13c433ce209d 100644 --- a/docs/reference/esql/functions/description/mv_first.asciidoc +++ b/docs/reference/esql/functions/description/mv_first.asciidoc @@ -2,4 +2,10 @@ *Description* -Converts a multivalued expression into a single valued column containing the first value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the minimum value use <> instead of `MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a performance benefit to `MV_FIRST`. +Converts a multivalued expression into a single valued column containing the first value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. + +The order that <> are read from +underlying storage is not guaranteed. It is *frequently* ascending, but don't +rely on that. 
If you need the minimum value use <> instead of +`MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a +performance benefit to `MV_FIRST`. diff --git a/docs/reference/esql/functions/description/mv_last.asciidoc b/docs/reference/esql/functions/description/mv_last.asciidoc index 4b4b4336588d..beba7b5a402c 100644 --- a/docs/reference/esql/functions/description/mv_last.asciidoc +++ b/docs/reference/esql/functions/description/mv_last.asciidoc @@ -2,4 +2,10 @@ *Description* -Converts a multivalue expression into a single valued column containing the last value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the maximum value use <> instead of `MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a performance benefit to `MV_LAST`. +Converts a multivalue expression into a single valued column containing the last value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. + +The order that <> are read from +underlying storage is not guaranteed. It is *frequently* ascending, but don't +rely on that. If you need the maximum value use <> instead of +`MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a +performance benefit to `MV_LAST`. diff --git a/docs/reference/esql/functions/description/mv_slice.asciidoc b/docs/reference/esql/functions/description/mv_slice.asciidoc index 24d3183b6f40..98438ae097fe 100644 --- a/docs/reference/esql/functions/description/mv_slice.asciidoc +++ b/docs/reference/esql/functions/description/mv_slice.asciidoc @@ -2,4 +2,8 @@ *Description* -Returns a subset of the multivalued field using the start and end index values. +Returns a subset of the multivalued field using the start and end index values. 
This is most useful when reading from a function that emits multivalued columns in a known order like <> or <>. + +The order that <> are read from +underlying storage is not guaranteed. It is *frequently* ascending, but don't +rely on that. diff --git a/docs/reference/esql/functions/kibana/definition/mv_first.json b/docs/reference/esql/functions/kibana/definition/mv_first.json index 480c0af2f091..80e761faafab 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_first.json +++ b/docs/reference/esql/functions/kibana/definition/mv_first.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "mv_first", - "description" : "Converts a multivalued expression into a single valued column containing the\nfirst value. This is most useful when reading from a function that emits\nmultivalued columns in a known order like <>.\n\nThe order that <> are read from\nunderlying storage is not guaranteed. It is *frequently* ascending, but don't\nrely on that. If you need the minimum value use <> instead of\n`MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a\nperformance benefit to `MV_FIRST`.", + "description" : "Converts a multivalued expression into a single valued column containing the\nfirst value. This is most useful when reading from a function that emits\nmultivalued columns in a known order like <>.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_last.json b/docs/reference/esql/functions/kibana/definition/mv_last.json index 0918e4645426..fb16400f86e6 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_last.json +++ b/docs/reference/esql/functions/kibana/definition/mv_last.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "mv_last", - "description" : "Converts a multivalue expression into a single valued column containing the last\nvalue. This is most useful when reading from a function that emits multivalued\ncolumns in a known order like <>.\n\nThe order that <> are read from\nunderlying storage is not guaranteed. It is *frequently* ascending, but don't\nrely on that. If you need the maximum value use <> instead of\n`MV_LAST`. `MV_MAX` has optimizations for sorted values so there isn't a\nperformance benefit to `MV_LAST`.", + "description" : "Converts a multivalue expression into a single valued column containing the last\nvalue. This is most useful when reading from a function that emits multivalued\ncolumns in a known order like <>.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json index dcae77f1545a..399a6145b040 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_slice.json +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "mv_slice", - "description" : "Returns a subset of the multivalued field using the start and end index values.", + "description" : "Returns a subset of the multivalued field using the start and end index values.\nThis is most useful when reading from a function that emits multivalued columns\nin a known order like <> or <>.", "signatures" : [ { "params" : [ diff --git a/docs/reference/esql/functions/kibana/docs/mv_first.md b/docs/reference/esql/functions/kibana/docs/mv_first.md index 4faea6edd916..c50ed7d76402 100644 --- a/docs/reference/esql/functions/kibana/docs/mv_first.md +++ b/docs/reference/esql/functions/kibana/docs/mv_first.md @@ -7,12 +7,6 @@ Converts a multivalued expression into a single valued column containing the first value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. -The order that <> are read from -underlying storage is not guaranteed. It is *frequently* ascending, but don't -rely on that. If you need the minimum value use <> instead of -`MV_FIRST`. `MV_MIN` has optimizations for sorted values so there isn't a -performance benefit to `MV_FIRST`. - ``` ROW a="foo;bar;baz" | EVAL first_a = MV_FIRST(SPLIT(a, ";")) diff --git a/docs/reference/esql/functions/kibana/docs/mv_last.md b/docs/reference/esql/functions/kibana/docs/mv_last.md index a8c3bf25eb51..eeefd929c135 100644 --- a/docs/reference/esql/functions/kibana/docs/mv_last.md +++ b/docs/reference/esql/functions/kibana/docs/mv_last.md @@ -7,12 +7,6 @@ Converts a multivalue expression into a single valued column containing the last value. This is most useful when reading from a function that emits multivalued columns in a known order like <>. -The order that <> are read from -underlying storage is not guaranteed. It is *frequently* ascending, but don't -rely on that. If you need the maximum value use <> instead of -`MV_LAST`. 
`MV_MAX` has optimizations for sorted values so there isn't a -performance benefit to `MV_LAST`. - ``` ROW a="foo;bar;baz" | EVAL last_a = MV_LAST(SPLIT(a, ";")) diff --git a/docs/reference/esql/functions/kibana/docs/mv_slice.md b/docs/reference/esql/functions/kibana/docs/mv_slice.md index 3daf0de930a7..bba7a219960e 100644 --- a/docs/reference/esql/functions/kibana/docs/mv_slice.md +++ b/docs/reference/esql/functions/kibana/docs/mv_slice.md @@ -4,6 +4,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### MV_SLICE Returns a subset of the multivalued field using the start and end index values. +This is most useful when reading from a function that emits multivalued columns +in a known order like <> or <>. ``` row a = [1, 2, 2, 3] diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java index f275496c6787..1491f5643e4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/FunctionInfo.java @@ -29,13 +29,18 @@ public @interface FunctionInfo { boolean preview() default false; /** - * The description of the function rendered in {@code META FUNCTIONS} - * and the docs. These should be complete sentences. + * The description of the function rendered in the docs and kibana's + * json files that drive their IDE-like experience. These should be + * complete sentences but can contain asciidoc syntax. It is rendered + * as a single paragraph. */ String description() default ""; /** - * Detailed descriptions of the function rendered in the docs. + * Detailed descriptions of the function rendered in the docs. 
This is + * rendered as a single paragraph following {@link #description()} in + * the docs and is excluded from Kibana's IDE-like + * experience. It can contain asciidoc syntax. */ String detailedDescription() default ""; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 6e76888f72b1..d5d203e7bb3d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -59,8 +59,8 @@ public class MvFirst extends AbstractMultivalueFunction { description = """ Converts a multivalued expression into a single valued column containing the first value. This is most useful when reading from a function that emits - multivalued columns in a known order like <>. - + multivalued columns in a known order like <>.""", + detailedDescription = """ The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the minimum value use <> instead of diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index 198731ca601f..21487f14817c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -59,8 +59,8 @@ public class MvLast extends AbstractMultivalueFunction { description = """ Converts a multivalue expression into a single valued column containing the last value. 
This is most useful when reading from a function that emits multivalued - columns in a known order like <>. - + columns in a known order like <>.""", + detailedDescription = """ The order that <> are read from underlying storage is not guaranteed. It is *frequently* ascending, but don't rely on that. If you need the maximum value use <> instead of diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index 9846ebe4111c..a829b6f1417b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -68,7 +68,14 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva "long", "text", "version" }, - description = "Returns a subset of the multivalued field using the start and end index values.", + description = """ + Returns a subset of the multivalued field using the start and end index values. + This is most useful when reading from a function that emits multivalued columns + in a known order like <> or <>.""", + detailedDescription = """ + The order that <> are read from + underlying storage is not guaranteed. It is *frequently* ascending, but don't + rely on that.""", examples = { @Example(file = "ints", tag = "mv_slice_positive"), @Example(file = "ints", tag = "mv_slice_negative") } ) public MvSlice( From a9ea432ac59960babe5cb75c1aa0c9f51b72860b Mon Sep 17 00:00:00 2001 From: Brendan Cully Date: Tue, 8 Oct 2024 11:15:35 -0700 Subject: [PATCH 59/85] Set SlowLog logging to TRACE in tests (#114344) The tests depend on the SlowLog loggers running at TRACE level but were not setting the level themselves. 
Instead they relied on the SlowLog setting the level to trace internally when it was created. If something else globally adjusted log levels between the time the SlowLog loggers were created and the tests ran, the tests could fail. And in fact, `ScopedSettingsTest.testFallbackToLoggerLevel` was updating the root log level, which had the side effect of updating the SlowLog level. In #112183 SlowLog's log initialization was made static, which opened up its test to failure when ScopedSettingsTest ran before a SlowLog test in the same JVM. I do not know if the intention of the SlowLog is that it overrides the global log level and should always be set at TRACE, in which case this fix is incorrect. It seems surprising, but I don't know why else SlowLog would explicitly initialize itself to TRACE. However, if that was the intention, the code was already at risk due to having no guard against being changed by Loggers.setLevel on an ancestor log. The change in this PR is at least not a regression in that behaviour. It does no longer start out at TRACE however, which is a change in behaviour. 
--- .../java/org/elasticsearch/index/IndexingSlowLog.java | 5 ----- .../java/org/elasticsearch/index/SearchSlowLog.java | 7 ------- .../org/elasticsearch/index/IndexingSlowLogTests.java | 7 ++++++- .../org/elasticsearch/index/SearchSlowLogTests.java | 10 +++++++++- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java index 70ecef375498..3ae4c0eb82ad 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingSlowLog.java @@ -9,13 +9,11 @@ package org.elasticsearch.index; -import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.util.StringBuilders; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogMessage; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.xcontent.XContentHelper; @@ -92,9 +90,6 @@ public final class IndexingSlowLog implements IndexingOperationListener { ); private static final Logger indexLogger = LogManager.getLogger(INDEX_INDEXING_SLOWLOG_PREFIX + ".index"); - static { - Loggers.setLevel(indexLogger, Level.TRACE); - } private final Index index; diff --git a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java index 2a2d650e20aa..e4836a391bfe 100644 --- a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -9,11 +9,9 @@ package org.elasticsearch.index; -import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import 
org.elasticsearch.common.logging.ESLogMessage; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.core.TimeValue; @@ -47,11 +45,6 @@ public final class SearchSlowLog implements SearchOperationListener { private static final Logger queryLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); private static final Logger fetchLogger = LogManager.getLogger(INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); - static { - Loggers.setLevel(queryLogger, Level.TRACE); - Loggers.setLevel(fetchLogger, Level.TRACE); - } - private final SlowLogFieldProvider slowLogFieldProvider; public static final Setting INDEX_SEARCH_SLOWLOG_INCLUDE_USER_SETTING = Setting.boolSetting( diff --git a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index 03550ca7fc03..753602e73a30 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -58,18 +58,23 @@ public class IndexingSlowLogTests extends ESTestCase { static MockAppender appender; static Releasable appenderRelease; static Logger testLogger1 = LogManager.getLogger(IndexingSlowLog.INDEX_INDEXING_SLOWLOG_PREFIX + ".index"); + static Level origLogLevel = testLogger1.getLevel(); @BeforeClass public static void init() throws IllegalAccessException { appender = new MockAppender("trace_appender"); appender.start(); Loggers.addAppender(testLogger1, appender); + + Loggers.setLevel(testLogger1, Level.TRACE); } @AfterClass public static void cleanup() { - appender.stop(); Loggers.removeAppender(testLogger1, appender); + appender.stop(); + + Loggers.setLevel(testLogger1, origLogLevel); } public void testLevelPrecedence() { diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java 
b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index dd1790cc786a..50e3269a6b9b 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -50,6 +50,8 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { static MockAppender appender; static Logger queryLog = LogManager.getLogger(SearchSlowLog.INDEX_SEARCH_SLOWLOG_PREFIX + ".query"); static Logger fetchLog = LogManager.getLogger(SearchSlowLog.INDEX_SEARCH_SLOWLOG_PREFIX + ".fetch"); + static Level origQueryLogLevel = queryLog.getLevel(); + static Level origFetchLogLevel = fetchLog.getLevel(); @BeforeClass public static void init() throws IllegalAccessException { @@ -57,13 +59,19 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase { appender.start(); Loggers.addAppender(queryLog, appender); Loggers.addAppender(fetchLog, appender); + + Loggers.setLevel(queryLog, Level.TRACE); + Loggers.setLevel(fetchLog, Level.TRACE); } @AfterClass public static void cleanup() { - appender.stop(); Loggers.removeAppender(queryLog, appender); Loggers.removeAppender(fetchLog, appender); + appender.stop(); + + Loggers.setLevel(queryLog, origQueryLogLevel); + Loggers.setLevel(fetchLog, origFetchLogLevel); } @Override From 216d2de877828d868b9864ea5371df8239142605 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 8 Oct 2024 14:38:15 -0400 Subject: [PATCH 60/85] ESQL: Weaken test assertion (#114336) Weaken the assertion when testing breakers: it's ok to break while building a block in addition to topn. 
--- .../org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index a24bd91206ac..38b3dd4bd7e3 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -166,7 +166,7 @@ public class HeapAttackIT extends ESRestTestCase { "error", matchesMap().extraOk() .entry("bytes_wanted", greaterThan(1000)) - .entry("reason", matchesRegex("\\[request] Data too large, data for \\[topn] would .+")) + .entry("reason", matchesRegex("\\[request] Data too large, data for \\[(topn|esql_block_factory)] would .+")) .entry("durability", "TRANSIENT") .entry("type", "circuit_breaking_exception") .entry("bytes_limit", greaterThan(1000)) From 19d7028631a284f81c52d7ecdfe152320da3f30e Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 8 Oct 2024 20:44:24 +0200 Subject: [PATCH 61/85] Run fail formatting yaml test with 1 shard (#114214) --- .../test/aggregations/stats_metric_fail_formatting.yml | 2 ++ muted-tests.yml | 6 ------ 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml index 82371c973407..1ff376eac61d 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/stats_metric_fail_formatting.yml 
@@ -3,6 +3,8 @@ setup: indices.create: index: test_date body: + settings: + number_of_shards: 1 mappings: properties: date_field: diff --git a/muted-tests.yml b/muted-tests.yml index 696d7a4496e6..ac3730c08b85 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -354,9 +354,6 @@ tests: - class: org.elasticsearch.action.bulk.IncrementalBulkIT method: testIncrementalBulkLowWatermarkBackOff issue: https://github.com/elastic/elasticsearch/issues/114182 -- class: org.elasticsearch.aggregations.AggregationsClientYamlTestSuiteIT - method: test {yaml=aggregations/stats_metric_fail_formatting/fail formatting} - issue: https://github.com/elastic/elasticsearch/issues/114187 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/114194 - class: org.elasticsearch.xpack.ilm.ExplainLifecycleIT @@ -373,9 +370,6 @@ tests: - class: org.elasticsearch.index.SearchSlowLogTests method: testTwoLoggersDifferentLevel issue: https://github.com/elastic/elasticsearch/issues/114301 -- class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT - method: test {p0=aggregations/stats_metric_fail_formatting/fail formatting} - issue: https://github.com/elastic/elasticsearch/issues/114320 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests method: testInfer_StreamRequest_ErrorResponse issue: https://github.com/elastic/elasticsearch/issues/114327 From 6955bc18a2e5e15f30b89191136596a605a2d808 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Halil=20B=C3=BClent=20Orhon?= Date: Tue, 8 Oct 2024 21:47:37 +0300 Subject: [PATCH 62/85] Fix analyzed wildcard query in simple_query_string when disjunctions is empty (#114264) This change fixes analyzed wildcard query in simple_query_string when disjunctions is empty. 
Closes #114185 --- docs/changelog/114264.yaml | 5 ++++ .../search/query/SimpleQueryStringIT.java | 26 +++++++++++++++++++ .../search/SimpleQueryStringQueryParser.java | 3 +++ 3 files changed, 34 insertions(+) create mode 100644 docs/changelog/114264.yaml diff --git a/docs/changelog/114264.yaml b/docs/changelog/114264.yaml new file mode 100644 index 000000000000..fe421f642283 --- /dev/null +++ b/docs/changelog/114264.yaml @@ -0,0 +1,5 @@ +pr: 114264 +summary: "Fix analyzed wildcard query in simple_query_string when disjunctions is empty" +area: Search +type: bug +issues: [114185] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 2fe7931d64c8..35f11eb1429b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -582,6 +582,32 @@ public class SimpleQueryStringIT extends ESIntegTestCase { }); } + public void testSimpleQueryStringWithAnalysisStopWords() throws Exception { + String mapping = Strings.toString( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("body") + .field("type", "text") + .field("analyzer", "stop") + .endObject() + .endObject() + .endObject() + ); + + CreateIndexRequestBuilder mappingRequest = indicesAdmin().prepareCreate("test1").setMapping(mapping); + mappingRequest.get(); + indexRandom(true, prepareIndex("test1").setId("1").setSource("body", "Some Text")); + refresh(); + + assertHitCount( + prepareSearch().setQuery( + simpleQueryStringQuery("the* text*").analyzeWildcard(true).defaultOperator(Operator.AND).field("body") + ), + 1 + ); + } + private void assertHits(SearchHits hits, String... 
ids) { assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); Set hitIds = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java index 59394c0a8e6f..5eaf79ad42bd 100644 --- a/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/SimpleQueryStringQueryParser.java @@ -199,6 +199,9 @@ public class SimpleQueryStringQueryParser extends SimpleQueryParser { if (disjuncts.size() == 1) { return disjuncts.get(0); } + if (disjuncts.size() == 0) { + return null; + } return new DisjunctionMaxQuery(disjuncts, 1.0f); } From 30aef7e99049ffba0d7bd1f2c972ed38f138a548 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 8 Oct 2024 21:29:11 +0200 Subject: [PATCH 63/85] Revert wolfi image update (#114350) Latest wolfi update caused our wolfi-ess image build to fail --- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 95f279bfa516..ac83a01ffc29 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -31,7 +31,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:90888b190da54062f67f3fef1372eb0ae7d81ea55f5a1f56d748b13e4853d984", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:c16d3ad6cebf387e8dd2ad769f54320c4819fbbaa21e729fad087c7ae223b4d0", "-wolfi", "apk" ), From 
7bf97da286ee700d9ed52d382674fdc57bfe18e4 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 8 Oct 2024 14:32:29 -0500 Subject: [PATCH 64/85] Fixing IpinfoIpDataLookupsTests for Windows (#114340) --- .../geoip/IpinfoIpDataLookupsTests.java | 27 ++++++++++++------- muted-tests.yml | 2 -- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java index 905eb027626a..5689693d6c29 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java @@ -13,9 +13,11 @@ import com.maxmind.db.DatabaseRecord; import com.maxmind.db.Networks; import com.maxmind.db.Reader; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -48,17 +50,22 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { private ThreadPool threadPool; private ResourceWatcherService resourceWatcherService; + // a temporary directory that mmdb files can be copied to and read from + private Path tmpDir; + @Before public void setup() { threadPool = new TestThreadPool(ConfigDatabases.class.getSimpleName()); Settings settings = Settings.builder().put("resource.reload.interval.high", TimeValue.timeValueMillis(100)).build(); resourceWatcherService = new ResourceWatcherService(settings, threadPool); + tmpDir = createTempDir(); } @After - public void cleanup() { + public void cleanup() throws IOException { resourceWatcherService.close(); 
threadPool.shutdownNow(); + IOUtils.rm(tmpDir); } public void testDatabasePropertyInvariants() { @@ -82,7 +89,8 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { } public void testAsn() throws IOException { - Path configDir = createTempDir(); + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + Path configDir = tmpDir; copyDatabase("ipinfo/ip_asn_sample.mmdb", configDir.resolve("ip_asn_sample.mmdb")); copyDatabase("ipinfo/asn_sample.mmdb", configDir.resolve("asn_sample.mmdb")); @@ -91,8 +99,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { configDatabases.initialize(resourceWatcherService); // this is the 'free' ASN database (sample) - { - DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_asn_sample.mmdb"); + try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_asn_sample.mmdb")) { IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values())); Map data = lookup.getData(loader, "5.182.109.0"); assertThat( @@ -110,8 +117,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { } // this is the non-free or 'standard' ASN database (sample) - { - DatabaseReaderLazyLoader loader = configDatabases.getDatabase("asn_sample.mmdb"); + try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("asn_sample.mmdb")) { IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values())); Map data = lookup.getData(loader, "23.53.116.0"); assertThat( @@ -132,7 +138,8 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { } public void testAsnInvariants() { - Path configDir = createTempDir(); + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + Path configDir = tmpDir; copyDatabase("ipinfo/ip_asn_sample.mmdb", configDir.resolve("ip_asn_sample.mmdb")); copyDatabase("ipinfo/asn_sample.mmdb", configDir.resolve("asn_sample.mmdb")); @@ -168,7 +175,8 @@ public class 
IpinfoIpDataLookupsTests extends ESTestCase { } public void testCountry() throws IOException { - Path configDir = createTempDir(); + assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS); + Path configDir = tmpDir; copyDatabase("ipinfo/ip_country_sample.mmdb", configDir.resolve("ip_country_sample.mmdb")); GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload @@ -176,8 +184,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase { configDatabases.initialize(resourceWatcherService); // this is the 'free' Country database (sample) - { - DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_country_sample.mmdb"); + try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_country_sample.mmdb")) { IpDataLookup lookup = new IpinfoIpDataLookups.Country(Set.of(Database.Property.values())); Map data = lookup.getData(loader, "4.221.143.168"); assertThat( diff --git a/muted-tests.yml b/muted-tests.yml index ac3730c08b85..88379d4533a5 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -362,8 +362,6 @@ tests: - class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests method: testInfer_StreamRequest issue: https://github.com/elastic/elasticsearch/issues/114232 -- class: org.elasticsearch.ingest.geoip.IpinfoIpDataLookupsTests - issue: https://github.com/elastic/elasticsearch/issues/114266 - class: org.elasticsearch.index.SearchSlowLogTests method: testLevelPrecedence issue: https://github.com/elastic/elasticsearch/issues/114300 From 2ba9bc98c5c88caad7e69143d6cb459c267426aa Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 8 Oct 2024 15:49:26 -0400 Subject: [PATCH 65/85] Adds an advanced binary quantization format copied from Lucene (#113491) This copies the work from https://github.com/apache/lucene/pull/13651 into Elasticsearch. 
The main reason for the copy is to simply allow it to be deployed & used in Elasticsearch prior to Elasticsearch upgrading to Lucene 10. At which point, we will then use the format as provided by Lucene. This is currently blocked by two pieces of work: blocked by: https://github.com/elastic/elasticsearch/pull/112933 blocked by: https://github.com/elastic/elasticsearch/pull/113333 After the format is merged, then code will be added for integration tests & integration with Elasticsearch through new index format types in the API. --- .../index/codec/vectors/BQSpaceUtils.java | 78 + .../index/codec/vectors/BQVectorUtils.java | 97 + .../vectors/BinarizedByteVectorValues.java | 58 + .../index/codec/vectors/BinaryQuantizer.java | 385 ++++ .../vectors/ES816BinaryFlatVectorsScorer.java | 273 +++ .../ES816BinaryQuantizedVectorsFormat.java | 75 + .../ES816BinaryQuantizedVectorsReader.java | 412 ++++ .../ES816BinaryQuantizedVectorsWriter.java | 987 +++++++++ ...ES816HnswBinaryQuantizedVectorsFormat.java | 144 ++ .../vectors/OffHeapBinarizedVectorValues.java | 456 ++++ ...RandomAccessBinarizedByteVectorValues.java | 70 + .../org.apache.lucene.codecs.KnnVectorsFormat | 2 + .../codec/vectors/BQVectorUtilsTests.java | 90 + .../vectors/BinaryQuantizationTests.java | 1856 +++++++++++++++++ .../ES816BinaryFlatVectorsScorerTests.java | 1746 ++++++++++++++++ ...S816BinaryQuantizedVectorsFormatTests.java | 175 ++ ...HnswBinaryQuantizedVectorsFormatTests.java | 126 ++ 17 files changed, 7030 insertions(+) create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java create mode 100644 
server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/BQVectorUtilsTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java new file mode 100644 index 000000000000..68363b5926a6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQSpaceUtils.java @@ -0,0 +1,78 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +/** Utility class for quantization calculations */ +public class BQSpaceUtils { + + public static final short B_QUERY = 4; + // the first four bits masked + private static final int B_QUERY_MASK = 15; + + /** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + * @param q the query vector, assumed to be half-byte quantized with values between 0 and 15 + * @param dimensions the number of dimensions in the query vector + * @param quantQueryByte the byte array to store the transposed query vector + */ + public static void transposeBin(byte[] q, int dimensions, byte[] quantQueryByte) { + // TODO: rewrite this in Panama Vector API + int qOffset = 0; + final byte[] v1 = new byte[4]; + final byte[] v = new byte[32]; + for (int i = 0; i < dimensions; i += 32) { + // for every four bytes we shift left (with remainder across those bytes) + for (int j = 0; j < v.length; j += 4) { + v[j] = (byte) (q[qOffset + j] << B_QUERY | ((q[qOffset + j] >>> B_QUERY) & B_QUERY_MASK)); + v[j + 1] = (byte) (q[qOffset + j + 1] << B_QUERY | ((q[qOffset + j + 1] >>> B_QUERY) & B_QUERY_MASK)); + v[j + 2] = (byte) (q[qOffset + j + 2] << B_QUERY | ((q[qOffset + j + 2] >>> B_QUERY) & B_QUERY_MASK)); + v[j + 3] = (byte) (q[qOffset + j + 3] << B_QUERY | ((q[qOffset + j + 3] 
>>> B_QUERY) & B_QUERY_MASK)); + } + for (int j = 0; j < B_QUERY; j++) { + moveMaskEpi8Byte(v, v1); + for (int k = 0; k < 4; k++) { + quantQueryByte[(B_QUERY - j - 1) * (dimensions / 8) + i / 8 + k] = v1[k]; + v1[k] = 0; + } + for (int k = 0; k < v.length; k += 4) { + v[k] = (byte) (v[k] + v[k]); + v[k + 1] = (byte) (v[k + 1] + v[k + 1]); + v[k + 2] = (byte) (v[k + 2] + v[k + 2]); + v[k + 3] = (byte) (v[k + 3] + v[k + 3]); + } + } + qOffset += 32; + } + } + + private static void moveMaskEpi8Byte(byte[] v, byte[] v1b) { + int m = 0; + for (int k = 0; k < v.length; k++) { + if ((v[k] & 0b10000000) == 0b10000000) { + v1b[m] |= 0b00000001; + } + if (k % 8 == 7) { + m++; + } else { + v1b[m] <<= 1; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java new file mode 100644 index 000000000000..3d2acb533e26 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BQVectorUtils.java @@ -0,0 +1,97 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BitUtil; +import org.apache.lucene.util.VectorUtil; + +/** Utility class for vector quantization calculations */ +public class BQVectorUtils { + private static final float EPSILON = 1e-4f; + + public static boolean isUnitVector(float[] v) { + double l1norm = VectorUtil.dotProduct(v, v); + return Math.abs(l1norm - 1.0d) <= EPSILON; + } + + public static int discretize(int value, int bucket) { + return ((value + (bucket - 1)) / bucket) * bucket; + } + + public static float[] pad(float[] vector, int dimensions) { + if (vector.length >= dimensions) { + return vector; + } + return ArrayUtil.growExact(vector, dimensions); + } + + public static byte[] pad(byte[] vector, int dimensions) { + if (vector.length >= dimensions) { + return vector; + } + return ArrayUtil.growExact(vector, dimensions); + } + + /** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + * @param d the byte array to count the number of set bits in + * @return count of flipped bits in the byte array + */ + public static int popcount(byte[] d) { + int r = 0; + int cnt = 0; + for (final int upperBound = d.length & -Integer.BYTES; r < upperBound; r += Integer.BYTES) { + cnt += Integer.bitCount((int) BitUtil.VH_NATIVE_INT.get(d, r)); + } + for (; r < d.length; r++) { + cnt += Integer.bitCount(d[r] & 0xFF); + } + return cnt; + } + + // TODO: move to VectorUtil & vectorize? 
+ public static void divideInPlace(float[] a, float b) { + for (int j = 0; j < a.length; j++) { + a[j] /= b; + } + } + + public static float[] subtract(float[] a, float[] b) { + float[] result = new float[a.length]; + subtract(a, b, result); + return result; + } + + public static void subtractInPlace(float[] target, float[] other) { + subtract(target, other, target); + } + + private static void subtract(float[] a, float[] b, float[] result) { + for (int j = 0; j < a.length; j++) { + result[j] = a[j] - b[j]; + } + } + + public static float norm(float[] vector) { + float magnitude = VectorUtil.dotProduct(vector, vector); + return (float) Math.sqrt(magnitude); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java new file mode 100644 index 000000000000..73dd4273a794 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java @@ -0,0 +1,58 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.VectorScorer; + +import java.io.IOException; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +public abstract class BinarizedByteVectorValues extends DocIdSetIterator { + + public abstract float[] getCorrectiveTerms(); + + public abstract byte[] vectorValue() throws IOException; + + /** Return the dimension of the vectors */ + public abstract int dimension(); + + /** + * Return the number of vectors for this field. + * + * @return the number of vectors returned by this iterator + */ + public abstract int size(); + + @Override + public final long cost() { + return size(); + } + + /** + * Return a {@link VectorScorer} for the given query vector. + * + * @param query the query vector + * @return a {@link VectorScorer} instance or null + */ + public abstract VectorScorer scorer(float[] query) throws IOException; +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java new file mode 100644 index 000000000000..192fb9092ac3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinaryQuantizer.java @@ -0,0 +1,385 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.VectorUtil; + +import static org.apache.lucene.index.VectorSimilarityFunction.COSINE; +import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; +import static org.elasticsearch.index.codec.vectors.BQVectorUtils.isUnitVector; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + * Quantized that quantizes raw vector values to binary. The algorithm is based on the paper RaBitQ. 
+ */ +public class BinaryQuantizer { + private final int discretizedDimensions; + + private final VectorSimilarityFunction similarityFunction; + private final float sqrtDimensions; + + public BinaryQuantizer(int dimensions, int discretizedDimensions, VectorSimilarityFunction similarityFunction) { + if (dimensions <= 0) { + throw new IllegalArgumentException("dimensions must be > 0 but was: " + dimensions); + } + assert discretizedDimensions % 64 == 0 : "discretizedDimensions must be a multiple of 64 but was: " + discretizedDimensions; + this.discretizedDimensions = discretizedDimensions; + this.similarityFunction = similarityFunction; + this.sqrtDimensions = (float) Math.sqrt(dimensions); + } + + BinaryQuantizer(int dimensions, VectorSimilarityFunction similarityFunction) { + this(dimensions, dimensions, similarityFunction); + } + + private static void removeSignAndDivide(float[] a, float divisor) { + for (int i = 0; i < a.length; i++) { + a[i] = Math.abs(a[i]) / divisor; + } + } + + private static float sumAndNormalize(float[] a, float norm) { + float aDivided = 0f; + + for (int i = 0; i < a.length; i++) { + aDivided += a[i]; + } + + aDivided = aDivided / norm; + if (Float.isFinite(aDivided) == false) { + aDivided = 0.8f; // can be anything + } + + return aDivided; + } + + private static void packAsBinary(float[] vector, byte[] packedVector) { + for (int h = 0; h < vector.length; h += 8) { + byte result = 0; + int q = 0; + for (int i = 7; i >= 0; i--) { + if (vector[h + i] > 0) { + result |= (byte) (1 << q); + } + q++; + } + packedVector[h / 8] = result; + } + } + + public VectorSimilarityFunction getSimilarity() { + return this.similarityFunction; + } + + private record SubspaceOutput(float projection) {} + + private SubspaceOutput generateSubSpace(float[] vector, float[] centroid, byte[] quantizedVector) { + // typically no-op if dimensions/64 + float[] paddedCentroid = BQVectorUtils.pad(centroid, discretizedDimensions); + float[] paddedVector = 
BQVectorUtils.pad(vector, discretizedDimensions); + + BQVectorUtils.subtractInPlace(paddedVector, paddedCentroid); + + // The inner product between the data vector and the quantized data vector + float norm = BQVectorUtils.norm(paddedVector); + + packAsBinary(paddedVector, quantizedVector); + + removeSignAndDivide(paddedVector, sqrtDimensions); + float projection = sumAndNormalize(paddedVector, norm); + + return new SubspaceOutput(projection); + } + + record SubspaceOutputMIP(float OOQ, float normOC, float oDotC) {} + + private SubspaceOutputMIP generateSubSpaceMIP(float[] vector, float[] centroid, byte[] quantizedVector) { + + // typically no-op if dimensions/64 + float[] paddedCentroid = BQVectorUtils.pad(centroid, discretizedDimensions); + float[] paddedVector = BQVectorUtils.pad(vector, discretizedDimensions); + + float oDotC = VectorUtil.dotProduct(paddedVector, paddedCentroid); + BQVectorUtils.subtractInPlace(paddedVector, paddedCentroid); + + float normOC = BQVectorUtils.norm(paddedVector); + packAsBinary(paddedVector, quantizedVector); + BQVectorUtils.divideInPlace(paddedVector, normOC); // OmC / norm(OmC) + + float OOQ = computerOOQ(vector.length, paddedVector, quantizedVector); + + return new SubspaceOutputMIP(OOQ, normOC, oDotC); + } + + private float computerOOQ(int originalLength, float[] normOMinusC, byte[] packedBinaryVector) { + float OOQ = 0f; + for (int j = 0; j < originalLength / 8; j++) { + for (int r = 0; r < 8; r++) { + int sign = ((packedBinaryVector[j] >> (7 - r)) & 0b00000001); + OOQ += (normOMinusC[j * 8 + r] * (2 * sign - 1)); + } + } + OOQ = OOQ / sqrtDimensions; + return OOQ; + } + + private static float[] range(float[] q) { + float vl = 1e20f; + float vr = -1e20f; + for (int i = 0; i < q.length; i++) { + if (q[i] < vl) { + vl = q[i]; + } + if (q[i] > vr) { + vr = q[i]; + } + } + + return new float[] { vl, vr }; + } + + /** Results of quantizing a vector for both querying and indexing */ + public record QueryAndIndexResults(float[] 
indexFeatures, QueryFactors queryFeatures) {} + + public QueryAndIndexResults quantizeQueryAndIndex(float[] vector, byte[] indexDestination, byte[] queryDestination, float[] centroid) { + assert similarityFunction != COSINE || isUnitVector(vector); + assert similarityFunction != COSINE || isUnitVector(centroid); + assert this.discretizedDimensions == BQVectorUtils.discretize(vector.length, 64); + + if (this.discretizedDimensions != indexDestination.length * 8) { + throw new IllegalArgumentException( + "vector and quantized vector destination must be compatible dimensions: " + + BQVectorUtils.discretize(vector.length, 64) + + " [ " + + this.discretizedDimensions + + " ]" + + "!= " + + indexDestination.length + + " * 8" + ); + } + + if (this.discretizedDimensions != (queryDestination.length * 8) / BQSpaceUtils.B_QUERY) { + throw new IllegalArgumentException( + "vector and quantized vector destination must be compatible dimensions: " + + vector.length + + " [ " + + this.discretizedDimensions + + " ]" + + "!= (" + + queryDestination.length + + " * 8) / " + + BQSpaceUtils.B_QUERY + ); + } + + if (vector.length != centroid.length) { + throw new IllegalArgumentException( + "vector and centroid dimensions must be the same: " + vector.length + "!= " + centroid.length + ); + } + vector = ArrayUtil.copyArray(vector); + float distToC = VectorUtil.squareDistance(vector, centroid); + // only need vdotc for dot-products similarity, but not for euclidean + float vDotC = similarityFunction != EUCLIDEAN ? 
VectorUtil.dotProduct(vector, centroid) : 0f; + BQVectorUtils.subtractInPlace(vector, centroid); + // both euclidean and dot-product need the norm of the vector, just at different times + float normVmC = BQVectorUtils.norm(vector); + // quantize for index + packAsBinary(BQVectorUtils.pad(vector, discretizedDimensions), indexDestination); + if (similarityFunction != EUCLIDEAN) { + BQVectorUtils.divideInPlace(vector, normVmC); + } + + // Quantize for query + float[] range = range(vector); + float lower = range[0]; + float upper = range[1]; + // Δ := (𝑣𝑟 − 𝑣𝑙)/(2𝐵𝑞 − 1) + float width = (upper - lower) / ((1 << BQSpaceUtils.B_QUERY) - 1); + + QuantResult quantResult = quantize(vector, lower, width); + byte[] byteQuery = quantResult.result(); + + // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 + // q¯ is an approximation of q′ (scalar quantized approximation) + // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal + byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); + BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, queryDestination); + QueryFactors factors = new QueryFactors(quantResult.quantizedSum, distToC, lower, width, normVmC, vDotC); + final float[] indexCorrections; + if (similarityFunction == EUCLIDEAN) { + indexCorrections = new float[2]; + indexCorrections[0] = (float) Math.sqrt(distToC); + removeSignAndDivide(vector, sqrtDimensions); + indexCorrections[1] = sumAndNormalize(vector, normVmC); + } else { + indexCorrections = new float[3]; + indexCorrections[0] = computerOOQ(vector.length, vector, indexDestination); + indexCorrections[1] = normVmC; + indexCorrections[2] = vDotC; + } + return new QueryAndIndexResults(indexCorrections, factors); + } + + public float[] quantizeForIndex(float[] vector, byte[] destination, float[] centroid) { + assert similarityFunction != COSINE || isUnitVector(vector); + assert similarityFunction != COSINE || isUnitVector(centroid); + assert this.discretizedDimensions == 
BQVectorUtils.discretize(vector.length, 64); + + if (this.discretizedDimensions != destination.length * 8) { + throw new IllegalArgumentException( + "vector and quantized vector destination must be compatible dimensions: " + + BQVectorUtils.discretize(vector.length, 64) + + " [ " + + this.discretizedDimensions + + " ]" + + "!= " + + destination.length + + " * 8" + ); + } + + if (vector.length != centroid.length) { + throw new IllegalArgumentException( + "vector and centroid dimensions must be the same: " + vector.length + "!= " + centroid.length + ); + } + + float[] corrections; + + // FIXME: make a copy of vector so we don't overwrite it here? + // ... (could trade subtractInPlace w subtract in genSubSpace) + vector = ArrayUtil.copyArray(vector); + + switch (similarityFunction) { + case EUCLIDEAN: + float distToCentroid = (float) Math.sqrt(VectorUtil.squareDistance(vector, centroid)); + + SubspaceOutput subspaceOutput = generateSubSpace(vector, centroid, destination); + corrections = new float[2]; + // FIXME: quantize these values so we are passing back 1 byte values for all three of these + corrections[0] = distToCentroid; + corrections[1] = subspaceOutput.projection(); + break; + case MAXIMUM_INNER_PRODUCT: + case COSINE: + case DOT_PRODUCT: + SubspaceOutputMIP subspaceOutputMIP = generateSubSpaceMIP(vector, centroid, destination); + corrections = new float[3]; + // FIXME: quantize these values so we are passing back 1 byte values for all three of these + corrections[0] = subspaceOutputMIP.OOQ(); + corrections[1] = subspaceOutputMIP.normOC(); + corrections[2] = subspaceOutputMIP.oDotC(); + break; + default: + throw new UnsupportedOperationException("Unsupported similarity function: " + similarityFunction); + } + + return corrections; + } + + private record QuantResult(byte[] result, int quantizedSum) {} + + private static QuantResult quantize(float[] vector, float lower, float width) { + // FIXME: speed up with panama? 
and/or use existing scalar quantization utils in Lucene? + byte[] result = new byte[vector.length]; + float oneOverWidth = 1.0f / width; + int sumQ = 0; + for (int i = 0; i < vector.length; i++) { + byte res = (byte) ((vector[i] - lower) * oneOverWidth); + result[i] = res; + sumQ += res; + } + + return new QuantResult(result, sumQ); + } + + /** Factors for quantizing query */ + public record QueryFactors(int quantizedSum, float distToC, float lower, float width, float normVmC, float vDotC) {} + + public QueryFactors quantizeForQuery(float[] vector, byte[] destination, float[] centroid) { + assert similarityFunction != COSINE || isUnitVector(vector); + assert similarityFunction != COSINE || isUnitVector(centroid); + assert this.discretizedDimensions == BQVectorUtils.discretize(vector.length, 64); + + if (this.discretizedDimensions != (destination.length * 8) / BQSpaceUtils.B_QUERY) { + throw new IllegalArgumentException( + "vector and quantized vector destination must be compatible dimensions: " + + vector.length + + " [ " + + this.discretizedDimensions + + " ]" + + "!= (" + + destination.length + + " * 8) / " + + BQSpaceUtils.B_QUERY + ); + } + + if (vector.length != centroid.length) { + throw new IllegalArgumentException( + "vector and centroid dimensions must be the same: " + vector.length + "!= " + centroid.length + ); + } + + float distToC = VectorUtil.squareDistance(vector, centroid); + + // FIXME: make a copy of vector so we don't overwrite it here? + // ... 
(could subtractInPlace but the passed vector is modified) <<--- + float[] vmC = BQVectorUtils.subtract(vector, centroid); + + // FIXME: should other similarity functions behave like MIP on query like COSINE + float normVmC = 0f; + if (similarityFunction != EUCLIDEAN) { + normVmC = BQVectorUtils.norm(vmC); + BQVectorUtils.divideInPlace(vmC, normVmC); + } + float[] range = range(vmC); + float lower = range[0]; + float upper = range[1]; + // Δ := (𝑣𝑟 − 𝑣𝑙)/(2𝐵𝑞 − 1) + float width = (upper - lower) / ((1 << BQSpaceUtils.B_QUERY) - 1); + + QuantResult quantResult = quantize(vmC, lower, width); + byte[] byteQuery = quantResult.result(); + + // q¯ = Δ · q¯𝑢 + 𝑣𝑙 · 1𝐷 + // q¯ is an approximation of q′ (scalar quantized approximation) + // FIXME: vectors need to be padded but that's expensive; update transponseBin to deal + byteQuery = BQVectorUtils.pad(byteQuery, discretizedDimensions); + BQSpaceUtils.transposeBin(byteQuery, discretizedDimensions, destination); + + QueryFactors factors; + if (similarityFunction != EUCLIDEAN) { + float vDotC = VectorUtil.dotProduct(vector, centroid); + // FIXME: quantize the corrections as well so we store less + factors = new QueryFactors(quantResult.quantizedSum, distToC, lower, width, normVmC, vDotC); + } else { + // FIXME: quantize the corrections as well so we store less + factors = new QueryFactors(quantResult.quantizedSum, distToC, lower, width, 0f, 0f); + } + + return factors; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java new file mode 100644 index 000000000000..78fa28270909 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -0,0 +1,273 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.VectorUtil; +import org.apache.lucene.util.hnsw.RandomAccessVectorValues; +import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.elasticsearch.simdvec.ESVectorUtil; + +import java.io.IOException; + +import static org.apache.lucene.index.VectorSimilarityFunction.COSINE; +import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; +import static org.apache.lucene.index.VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; + +/** Vector scorer over binarized vector values */ +public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { + private final FlatVectorsScorer nonQuantizedDelegate; + + public ES816BinaryFlatVectorsScorer(FlatVectorsScorer nonQuantizedDelegate) { + this.nonQuantizedDelegate = nonQuantizedDelegate; + } + + @Override + public RandomVectorScorerSupplier getRandomVectorScorerSupplier( + VectorSimilarityFunction similarityFunction, + RandomAccessVectorValues vectorValues + 
) throws IOException { + if (vectorValues instanceof RandomAccessBinarizedByteVectorValues) { + throw new UnsupportedOperationException( + "getRandomVectorScorerSupplier(VectorSimilarityFunction,RandomAccessVectorValues) not implemented for binarized format" + ); + } + return nonQuantizedDelegate.getRandomVectorScorerSupplier(similarityFunction, vectorValues); + } + + @Override + public RandomVectorScorer getRandomVectorScorer( + VectorSimilarityFunction similarityFunction, + RandomAccessVectorValues vectorValues, + float[] target + ) throws IOException { + if (vectorValues instanceof RandomAccessBinarizedByteVectorValues binarizedVectors) { + BinaryQuantizer quantizer = binarizedVectors.getQuantizer(); + float[] centroid = binarizedVectors.getCentroid(); + // FIXME: precompute this once? + int discretizedDimensions = BQVectorUtils.discretize(target.length, 64); + if (similarityFunction == COSINE) { + float[] copy = ArrayUtil.copyOfSubArray(target, 0, target.length); + VectorUtil.l2normalize(copy); + target = copy; + } + byte[] quantized = new byte[BQSpaceUtils.B_QUERY * discretizedDimensions / 8]; + BinaryQuantizer.QueryFactors factors = quantizer.quantizeForQuery(target, quantized, centroid); + BinaryQueryVector queryVector = new BinaryQueryVector(quantized, factors); + return new BinarizedRandomVectorScorer(queryVector, binarizedVectors, similarityFunction); + } + return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); + } + + @Override + public RandomVectorScorer getRandomVectorScorer( + VectorSimilarityFunction similarityFunction, + RandomAccessVectorValues vectorValues, + byte[] target + ) throws IOException { + return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); + } + + RandomVectorScorerSupplier getRandomVectorScorerSupplier( + VectorSimilarityFunction similarityFunction, + ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues scoringVectors, + 
RandomAccessBinarizedByteVectorValues targetVectors + ) { + return new BinarizedRandomVectorScorerSupplier(scoringVectors, targetVectors, similarityFunction); + } + + @Override + public String toString() { + return "ES816BinaryFlatVectorsScorer(nonQuantizedDelegate=" + nonQuantizedDelegate + ")"; + } + + /** Vector scorer supplier over binarized vector values */ + static class BinarizedRandomVectorScorerSupplier implements RandomVectorScorerSupplier { + private final ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors; + private final RandomAccessBinarizedByteVectorValues targetVectors; + private final VectorSimilarityFunction similarityFunction; + + BinarizedRandomVectorScorerSupplier( + ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors, + RandomAccessBinarizedByteVectorValues targetVectors, + VectorSimilarityFunction similarityFunction + ) { + this.queryVectors = queryVectors; + this.targetVectors = targetVectors; + this.similarityFunction = similarityFunction; + } + + @Override + public RandomVectorScorer scorer(int ord) throws IOException { + byte[] vector = queryVectors.vectorValue(ord); + int quantizedSum = queryVectors.sumQuantizedValues(ord); + float distanceToCentroid = queryVectors.getCentroidDistance(ord); + float lower = queryVectors.getLower(ord); + float width = queryVectors.getWidth(ord); + float normVmC = 0f; + float vDotC = 0f; + if (similarityFunction != EUCLIDEAN) { + normVmC = queryVectors.getNormVmC(ord); + vDotC = queryVectors.getVDotC(ord); + } + BinaryQueryVector binaryQueryVector = new BinaryQueryVector( + vector, + new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, lower, width, normVmC, vDotC) + ); + return new BinarizedRandomVectorScorer(binaryQueryVector, targetVectors, similarityFunction); + } + + @Override + public RandomVectorScorerSupplier copy() throws IOException { + return new BinarizedRandomVectorScorerSupplier(queryVectors.copy(), targetVectors.copy(), 
similarityFunction); + } + } + + /** A binarized query representing its quantized form along with factors */ + public record BinaryQueryVector(byte[] vector, BinaryQuantizer.QueryFactors factors) {} + + /** Vector scorer over binarized vector values */ + public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { + private final BinaryQueryVector queryVector; + private final RandomAccessBinarizedByteVectorValues targetVectors; + private final VectorSimilarityFunction similarityFunction; + + private final float sqrtDimensions; + + public BinarizedRandomVectorScorer( + BinaryQueryVector queryVectors, + RandomAccessBinarizedByteVectorValues targetVectors, + VectorSimilarityFunction similarityFunction + ) { + super(targetVectors); + this.queryVector = queryVectors; + this.targetVectors = targetVectors; + this.similarityFunction = similarityFunction; + // FIXME: precompute this once? + this.sqrtDimensions = (float) Utils.constSqrt(targetVectors.dimension()); + } + + // FIXME: utils class; pull this out + private static class Utils { + public static double sqrtNewtonRaphson(double x, double curr, double prev) { + return (curr == prev) ? curr : sqrtNewtonRaphson(x, 0.5 * (curr + x / curr), curr); + } + + public static double constSqrt(double x) { + return x >= 0 && Double.isInfinite(x) == false ? sqrtNewtonRaphson(x, x, 0) : Double.NaN; + } + } + + @Override + public float score(int targetOrd) throws IOException { + // FIXME: implement fastscan in the future? 
+ + byte[] quantizedQuery = queryVector.vector(); + int quantizedSum = queryVector.factors().quantizedSum(); + float lower = queryVector.factors().lower(); + float width = queryVector.factors().width(); + float distanceToCentroid = queryVector.factors().distToC(); + if (similarityFunction == EUCLIDEAN) { + return euclideanScore(targetOrd, sqrtDimensions, quantizedQuery, distanceToCentroid, lower, quantizedSum, width); + } + + float vmC = queryVector.factors().normVmC(); + float vDotC = queryVector.factors().vDotC(); + float cDotC = targetVectors.getCentroidDP(); + byte[] binaryCode = targetVectors.vectorValue(targetOrd); + float ooq = targetVectors.getOOQ(targetOrd); + float normOC = targetVectors.getNormOC(targetOrd); + float oDotC = targetVectors.getODotC(targetOrd); + + float qcDist = ESVectorUtil.ipByteBinByte(quantizedQuery, binaryCode); + + // FIXME: pre-compute these only once for each target vector + // ... pull this out or use a similar cache mechanism as do in score + float xbSum = (float) BQVectorUtils.popcount(binaryCode); + final float dist; + // If ||o-c|| == 0, so, it's ok to throw the rest of the equation away + // and simply use `oDotC + vDotC - cDotC` as centroid == doc vector + if (normOC == 0 || ooq == 0) { + dist = oDotC + vDotC - cDotC; + } else { + // If ||o-c|| != 0, we should assume that `ooq` is finite + assert Float.isFinite(ooq); + float estimatedDot = (2 * width / sqrtDimensions * qcDist + 2 * lower / sqrtDimensions * xbSum - width / sqrtDimensions + * quantizedSum - sqrtDimensions * lower) / ooq; + dist = vmC * normOC * estimatedDot + oDotC + vDotC - cDotC; + } + assert Float.isFinite(dist); + + // TODO: this is useful for mandatory rescoring by accounting for bias + // However, for just oversampling & rescoring, it isn't strictly useful. 
+ // We should consider utilizing this bias in the future to determine which vectors need to + // be rescored + // float ooqSqr = (float) Math.pow(ooq, 2); + // float errorBound = (float) (normVmC * normOC * (maxX1 * Math.sqrt((1 - ooqSqr) / ooqSqr))); + // float score = dist - errorBound; + if (similarityFunction == MAXIMUM_INNER_PRODUCT) { + return VectorUtil.scaleMaxInnerProductScore(dist); + } + return Math.max((1f + dist) / 2f, 0); + } + + private float euclideanScore( + int targetOrd, + float sqrtDimensions, + byte[] quantizedQuery, + float distanceToCentroid, + float lower, + int quantizedSum, + float width + ) throws IOException { + byte[] binaryCode = targetVectors.vectorValue(targetOrd); + + // FIXME: pre-compute these only once for each target vector + // .. not sure how to enumerate the target ordinals but that's what we did in PoC + float targetDistToC = targetVectors.getCentroidDistance(targetOrd); + float x0 = targetVectors.getVectorMagnitude(targetOrd); + float sqrX = targetDistToC * targetDistToC; + double xX0 = targetDistToC / x0; + + // TODO maybe store? + float xbSum = (float) BQVectorUtils.popcount(binaryCode); + float factorPPC = (float) (-2.0 / sqrtDimensions * xX0 * (xbSum * 2.0 - targetVectors.dimension())); + float factorIP = (float) (-2.0 / sqrtDimensions * xX0); + + long qcDist = ESVectorUtil.ipByteBinByte(quantizedQuery, binaryCode); + float score = sqrX + distanceToCentroid + factorPPC * lower + (qcDist * 2 - quantizedSum) * factorIP * width; + // TODO: this is useful for mandatory rescoring by accounting for bias + // However, for just oversampling & rescoring, it isn't strictly useful. 
+ // We should consider utilizing this bias in the future to determine which vectors need to + // be rescored + // float projectionDist = (float) Math.sqrt(xX0 * xX0 - targetDistToC * targetDistToC); + // float error = 2.0f * maxX1 * projectionDist; + // float y = (float) Math.sqrt(distanceToCentroid); + // float errorBound = y * error; + // if (Float.isFinite(errorBound)) { + // score = dist + errorBound; + // } + return Math.max(1 / (1f + score), 0); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java new file mode 100644 index 000000000000..523d5f6c4a91 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormat.java @@ -0,0 +1,75 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.hnsw.FlatVectorScorerUtil; +import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; +import org.apache.lucene.codecs.hnsw.FlatVectorsReader; +import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; +import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; + +import java.io.IOException; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +public class ES816BinaryQuantizedVectorsFormat extends FlatVectorsFormat { + + public static final String BINARIZED_VECTOR_COMPONENT = "BVEC"; + public static final String NAME = "ES816BinaryQuantizedVectorsFormat"; + + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + static final String META_CODEC_NAME = "ES816BinaryQuantizedVectorsFormatMeta"; + static final String VECTOR_DATA_CODEC_NAME = "ES816BinaryQuantizedVectorsFormatData"; + static final String META_EXTENSION = "vemb"; + static final String VECTOR_DATA_EXTENSION = "veb"; + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + private static final FlatVectorsFormat rawVectorFormat = new Lucene99FlatVectorsFormat( + FlatVectorScorerUtil.getLucene99FlatVectorsScorer() + ); + + private static final ES816BinaryFlatVectorsScorer scorer = new ES816BinaryFlatVectorsScorer( + FlatVectorScorerUtil.getLucene99FlatVectorsScorer() + ); + + /** Creates a new instance with the default number of vectors per cluster. 
*/ + public ES816BinaryQuantizedVectorsFormat() { + super(NAME); + } + + @Override + public FlatVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new ES816BinaryQuantizedVectorsWriter(scorer, rawVectorFormat.fieldsWriter(state), state); + } + + @Override + public FlatVectorsReader fieldsReader(SegmentReadState state) throws IOException { + return new ES816BinaryQuantizedVectorsReader(state, rawVectorFormat.fieldsReader(state), scorer); + } + + @Override + public String toString() { + return "ES816BinaryQuantizedVectorsFormat(name=" + NAME + ", flatVectorScorer=" + scorer + ")"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java new file mode 100644 index 000000000000..b0378fee6793 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java @@ -0,0 +1,412 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.hnsw.FlatVectorsReader; +import org.apache.lucene.codecs.lucene95.OrdToDocDISIReaderConfiguration; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.KnnCollector; +import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.SuppressForbidden; +import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; +import org.apache.lucene.util.hnsw.RandomVectorScorer; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader.readSimilarityFunction; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader.readVectorEncoding; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +@SuppressForbidden(reason = "Lucene classes") +public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { + + private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(ES816BinaryQuantizedVectorsReader.class); + + private final Map fields = new HashMap<>(); + private final IndexInput quantizedVectorData; + private final FlatVectorsReader rawVectorsReader; + private final 
ES816BinaryFlatVectorsScorer vectorScorer; + + public ES816BinaryQuantizedVectorsReader( + SegmentReadState state, + FlatVectorsReader rawVectorsReader, + ES816BinaryFlatVectorsScorer vectorsScorer + ) throws IOException { + super(vectorsScorer); + this.vectorScorer = vectorsScorer; + this.rawVectorsReader = rawVectorsReader; + int versionMeta = -1; + String metaFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES816BinaryQuantizedVectorsFormat.META_EXTENSION + ); + boolean success = false; + try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) { + Throwable priorE = null; + try { + versionMeta = CodecUtil.checkIndexHeader( + meta, + ES816BinaryQuantizedVectorsFormat.META_CODEC_NAME, + ES816BinaryQuantizedVectorsFormat.VERSION_START, + ES816BinaryQuantizedVectorsFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(meta, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(meta, priorE); + } + quantizedVectorData = openDataInput( + state, + versionMeta, + ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_EXTENSION, + ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, + // Quantized vectors are accessed randomly from their node ID stored in the HNSW + // graph. 
+ state.context.withRandomAccess() + ); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + FieldEntry fieldEntry = readField(meta, info); + validateFieldEntry(info, fieldEntry); + fields.put(info.name, fieldEntry); + } + } + + static void validateFieldEntry(FieldInfo info, FieldEntry fieldEntry) { + int dimension = info.getVectorDimension(); + if (dimension != fieldEntry.dimension) { + throw new IllegalStateException( + "Inconsistent vector dimension for field=\"" + info.name + "\"; " + dimension + " != " + fieldEntry.dimension + ); + } + + int binaryDims = BQVectorUtils.discretize(dimension, 64) / 8; + int correctionsCount = fieldEntry.similarityFunction != VectorSimilarityFunction.EUCLIDEAN ? 
3 : 2; + long numQuantizedVectorBytes = Math.multiplyExact((binaryDims + (Float.BYTES * correctionsCount)), (long) fieldEntry.size); + if (numQuantizedVectorBytes != fieldEntry.vectorDataLength) { + throw new IllegalStateException( + "Binarized vector data length " + + fieldEntry.vectorDataLength + + " not matching size = " + + fieldEntry.size + + " * (binaryBytes=" + + binaryDims + + " + 8" + + ") = " + + numQuantizedVectorBytes + ); + } + } + + @Override + public RandomVectorScorer getRandomVectorScorer(String field, float[] target) throws IOException { + FieldEntry fi = fields.get(field); + if (fi == null) { + return null; + } + return vectorScorer.getRandomVectorScorer( + fi.similarityFunction, + OffHeapBinarizedVectorValues.load( + fi.ordToDocDISIReaderConfiguration, + fi.dimension, + fi.size, + new BinaryQuantizer(fi.dimension, fi.descritizedDimension, fi.similarityFunction), + fi.similarityFunction, + vectorScorer, + fi.centroid, + fi.centroidDP, + fi.vectorDataOffset, + fi.vectorDataLength, + quantizedVectorData + ), + target + ); + } + + @Override + public RandomVectorScorer getRandomVectorScorer(String field, byte[] target) throws IOException { + return rawVectorsReader.getRandomVectorScorer(field, target); + } + + @Override + public void checkIntegrity() throws IOException { + rawVectorsReader.checkIntegrity(); + CodecUtil.checksumEntireFile(quantizedVectorData); + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + FieldEntry fi = fields.get(field); + if (fi == null) { + return null; + } + if (fi.vectorEncoding != VectorEncoding.FLOAT32) { + throw new IllegalArgumentException( + "field=\"" + field + "\" is encoded as: " + fi.vectorEncoding + " expected: " + VectorEncoding.FLOAT32 + ); + } + OffHeapBinarizedVectorValues bvv = OffHeapBinarizedVectorValues.load( + fi.ordToDocDISIReaderConfiguration, + fi.dimension, + fi.size, + new BinaryQuantizer(fi.dimension, fi.descritizedDimension, 
fi.similarityFunction), + fi.similarityFunction, + vectorScorer, + fi.centroid, + fi.centroidDP, + fi.vectorDataOffset, + fi.vectorDataLength, + quantizedVectorData + ); + return new BinarizedVectorValues(rawVectorsReader.getFloatVectorValues(field), bvv); + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return rawVectorsReader.getByteVectorValues(field); + } + + @Override + public void search(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + rawVectorsReader.search(field, target, knnCollector, acceptDocs); + } + + @Override + public void search(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + if (knnCollector.k() == 0) return; + final RandomVectorScorer scorer = getRandomVectorScorer(field, target); + if (scorer == null) return; + OrdinalTranslatedKnnCollector collector = new OrdinalTranslatedKnnCollector(knnCollector, scorer::ordToDoc); + Bits acceptedOrds = scorer.getAcceptOrds(acceptDocs); + for (int i = 0; i < scorer.maxOrd(); i++) { + if (acceptedOrds == null || acceptedOrds.get(i)) { + collector.collect(i, scorer.score(i)); + collector.incVisitedCount(1); + } + } + } + + @Override + public void close() throws IOException { + IOUtils.close(quantizedVectorData, rawVectorsReader); + } + + @Override + public long ramBytesUsed() { + long size = SHALLOW_SIZE; + size += RamUsageEstimator.sizeOfMap(fields, RamUsageEstimator.shallowSizeOfInstance(FieldEntry.class)); + size += rawVectorsReader.ramBytesUsed(); + return size; + } + + public float[] getCentroid(String field) { + FieldEntry fieldEntry = fields.get(field); + if (fieldEntry != null) { + return fieldEntry.centroid; + } + return null; + } + + private static IndexInput openDataInput( + SegmentReadState state, + int versionMeta, + String fileExtension, + String codecName, + IOContext context + ) throws IOException { + String fileName = 
IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, fileExtension); + IndexInput in = state.directory.openInput(fileName, context); + boolean success = false; + try { + int versionVectorData = CodecUtil.checkIndexHeader( + in, + codecName, + ES816BinaryQuantizedVectorsFormat.VERSION_START, + ES816BinaryQuantizedVectorsFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (versionMeta != versionVectorData) { + throw new CorruptIndexException( + "Format versions mismatch: meta=" + versionMeta + ", " + codecName + "=" + versionVectorData, + in + ); + } + CodecUtil.retrieveChecksum(in); + success = true; + return in; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(in); + } + } + } + + private FieldEntry readField(IndexInput input, FieldInfo info) throws IOException { + VectorEncoding vectorEncoding = readVectorEncoding(input); + VectorSimilarityFunction similarityFunction = readSimilarityFunction(input); + if (similarityFunction != info.getVectorSimilarityFunction()) { + throw new IllegalStateException( + "Inconsistent vector similarity function for field=\"" + + info.name + + "\"; " + + similarityFunction + + " != " + + info.getVectorSimilarityFunction() + ); + } + return FieldEntry.create(input, vectorEncoding, info.getVectorSimilarityFunction()); + } + + private record FieldEntry( + VectorSimilarityFunction similarityFunction, + VectorEncoding vectorEncoding, + int dimension, + int descritizedDimension, + long vectorDataOffset, + long vectorDataLength, + int size, + float[] centroid, + float centroidDP, + OrdToDocDISIReaderConfiguration ordToDocDISIReaderConfiguration + ) { + + static FieldEntry create(IndexInput input, VectorEncoding vectorEncoding, VectorSimilarityFunction similarityFunction) + throws IOException { + int dimension = input.readVInt(); + long vectorDataOffset = input.readVLong(); + long vectorDataLength = input.readVLong(); + int size = input.readVInt(); + final 
float[] centroid; + float centroidDP = 0; + if (size > 0) { + centroid = new float[dimension]; + input.readFloats(centroid, 0, dimension); + centroidDP = Float.intBitsToFloat(input.readInt()); + } else { + centroid = null; + } + OrdToDocDISIReaderConfiguration conf = OrdToDocDISIReaderConfiguration.fromStoredMeta(input, size); + return new FieldEntry( + similarityFunction, + vectorEncoding, + dimension, + BQVectorUtils.discretize(dimension, 64), + vectorDataOffset, + vectorDataLength, + size, + centroid, + centroidDP, + conf + ); + } + } + + /** Binarized vector values holding row and quantized vector values */ + protected static final class BinarizedVectorValues extends FloatVectorValues { + private final FloatVectorValues rawVectorValues; + private final OffHeapBinarizedVectorValues quantizedVectorValues; + + BinarizedVectorValues(FloatVectorValues rawVectorValues, OffHeapBinarizedVectorValues quantizedVectorValues) { + this.rawVectorValues = rawVectorValues; + this.quantizedVectorValues = quantizedVectorValues; + } + + @Override + public int dimension() { + return rawVectorValues.dimension(); + } + + @Override + public int size() { + return rawVectorValues.size(); + } + + @Override + public float[] vectorValue() throws IOException { + return rawVectorValues.vectorValue(); + } + + @Override + public int docID() { + return rawVectorValues.docID(); + } + + @Override + public int nextDoc() throws IOException { + int rawDocId = rawVectorValues.nextDoc(); + int quantizedDocId = quantizedVectorValues.nextDoc(); + assert rawDocId == quantizedDocId; + return quantizedDocId; + } + + @Override + public int advance(int target) throws IOException { + int rawDocId = rawVectorValues.advance(target); + int quantizedDocId = quantizedVectorValues.advance(target); + assert rawDocId == quantizedDocId; + return quantizedDocId; + } + + @Override + public VectorScorer scorer(float[] query) throws IOException { + return quantizedVectorValues.scorer(query); + } + + protected 
OffHeapBinarizedVectorValues getQuantizedVectorValues() throws IOException { + return quantizedVectorValues; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java new file mode 100644 index 000000000000..92837a8ffce4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java @@ -0,0 +1,987 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.hnsw.FlatFieldVectorsWriter; +import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; +import org.apache.lucene.codecs.lucene95.OrdToDocDISIReaderConfiguration; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.index.DocsWithFieldSet; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.MergeState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.Sorter; +import org.apache.lucene.index.VectorEncoding; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.internal.hppc.FloatArrayList; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.RamUsageEstimator; +import org.apache.lucene.util.VectorUtil; +import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; +import org.apache.lucene.util.hnsw.RandomAccessVectorValues; +import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; +import org.elasticsearch.core.SuppressForbidden; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.apache.lucene.index.VectorSimilarityFunction.COSINE; +import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import 
static org.apache.lucene.util.RamUsageEstimator.shallowSizeOfInstance; +import static org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat.BINARIZED_VECTOR_COMPONENT; +import static org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +@SuppressForbidden(reason = "Lucene classes") +public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { + private static final long SHALLOW_RAM_BYTES_USED = shallowSizeOfInstance(ES816BinaryQuantizedVectorsWriter.class); + + private final SegmentWriteState segmentWriteState; + private final List fields = new ArrayList<>(); + private final IndexOutput meta, binarizedVectorData; + private final FlatVectorsWriter rawVectorDelegate; + private final ES816BinaryFlatVectorsScorer vectorsScorer; + private boolean finished; + + /** + * Sole constructor + * + * @param vectorsScorer the scorer to use for scoring vectors + */ + protected ES816BinaryQuantizedVectorsWriter( + ES816BinaryFlatVectorsScorer vectorsScorer, + FlatVectorsWriter rawVectorDelegate, + SegmentWriteState state + ) throws IOException { + super(vectorsScorer); + this.vectorsScorer = vectorsScorer; + this.segmentWriteState = state; + String metaFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES816BinaryQuantizedVectorsFormat.META_EXTENSION + ); + + String binarizedVectorDataFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_EXTENSION + ); + this.rawVectorDelegate = rawVectorDelegate; + boolean success = false; + try { + meta = state.directory.createOutput(metaFileName, state.context); + binarizedVectorData = state.directory.createOutput(binarizedVectorDataFileName, state.context); + + CodecUtil.writeIndexHeader( + meta, + 
ES816BinaryQuantizedVectorsFormat.META_CODEC_NAME, + ES816BinaryQuantizedVectorsFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + CodecUtil.writeIndexHeader( + binarizedVectorData, + ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, + ES816BinaryQuantizedVectorsFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + public FlatFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { + FlatFieldVectorsWriter rawVectorDelegate = this.rawVectorDelegate.addField(fieldInfo); + if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) { + @SuppressWarnings("unchecked") + FieldWriter fieldWriter = new FieldWriter(fieldInfo, (FlatFieldVectorsWriter) rawVectorDelegate); + fields.add(fieldWriter); + return fieldWriter; + } + return rawVectorDelegate; + } + + @Override + public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException { + rawVectorDelegate.flush(maxDoc, sortMap); + for (FieldWriter field : fields) { + // after raw vectors are written, normalize vectors for clustering and quantization + if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) { + field.normalizeVectors(); + } + + final float[] clusterCenter; + int vectorCount = field.flatFieldVectorsWriter.getVectors().size(); + clusterCenter = new float[field.dimensionSums.length]; + if (vectorCount > 0) { + for (int i = 0; i < field.dimensionSums.length; i++) { + clusterCenter[i] = field.dimensionSums[i] / vectorCount; + } + if (VectorSimilarityFunction.COSINE == field.fieldInfo.getVectorSimilarityFunction()) { + VectorUtil.l2normalize(clusterCenter); + } + } + if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) { + segmentWriteState.infoStream.message(BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount); + } + int descritizedDimension = 
BQVectorUtils.discretize(field.fieldInfo.getVectorDimension(), 64); + BinaryQuantizer quantizer = new BinaryQuantizer( + field.fieldInfo.getVectorDimension(), + descritizedDimension, + field.fieldInfo.getVectorSimilarityFunction() + ); + if (sortMap == null) { + writeField(field, clusterCenter, maxDoc, quantizer); + } else { + writeSortingField(field, clusterCenter, maxDoc, sortMap, quantizer); + } + field.finish(); + } + } + + private void writeField(FieldWriter fieldData, float[] clusterCenter, int maxDoc, BinaryQuantizer quantizer) throws IOException { + // write vector values + long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); + writeBinarizedVectors(fieldData, clusterCenter, quantizer); + long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset; + float centroidDp = fieldData.getVectors().size() > 0 ? VectorUtil.dotProduct(clusterCenter, clusterCenter) : 0; + + writeMeta( + fieldData.fieldInfo, + maxDoc, + vectorDataOffset, + vectorDataLength, + clusterCenter, + centroidDp, + fieldData.getDocsWithFieldSet() + ); + } + + private void writeBinarizedVectors(FieldWriter fieldData, float[] clusterCenter, BinaryQuantizer scalarQuantizer) throws IOException { + byte[] vector = new byte[BQVectorUtils.discretize(fieldData.fieldInfo.getVectorDimension(), 64) / 8]; + int correctionsCount = scalarQuantizer.getSimilarity() != EUCLIDEAN ? 
3 : 2; + final ByteBuffer correctionsBuffer = ByteBuffer.allocate(Float.BYTES * correctionsCount).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < fieldData.getVectors().size(); i++) { + float[] v = fieldData.getVectors().get(i); + float[] corrections = scalarQuantizer.quantizeForIndex(v, vector, clusterCenter); + binarizedVectorData.writeBytes(vector, vector.length); + for (int j = 0; j < corrections.length; j++) { + correctionsBuffer.putFloat(corrections[j]); + } + binarizedVectorData.writeBytes(correctionsBuffer.array(), correctionsBuffer.array().length); + correctionsBuffer.rewind(); + } + } + + private void writeSortingField( + FieldWriter fieldData, + float[] clusterCenter, + int maxDoc, + Sorter.DocMap sortMap, + BinaryQuantizer scalarQuantizer + ) throws IOException { + final int[] ordMap = new int[fieldData.getDocsWithFieldSet().cardinality()]; // new ord to old ord + + DocsWithFieldSet newDocsWithField = new DocsWithFieldSet(); + mapOldOrdToNewOrd(fieldData.getDocsWithFieldSet(), sortMap, null, ordMap, newDocsWithField); + + // write vector values + long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); + writeSortedBinarizedVectors(fieldData, clusterCenter, ordMap, scalarQuantizer); + long quantizedVectorLength = binarizedVectorData.getFilePointer() - vectorDataOffset; + + float centroidDp = VectorUtil.dotProduct(clusterCenter, clusterCenter); + writeMeta(fieldData.fieldInfo, maxDoc, vectorDataOffset, quantizedVectorLength, clusterCenter, centroidDp, newDocsWithField); + } + + private void writeSortedBinarizedVectors(FieldWriter fieldData, float[] clusterCenter, int[] ordMap, BinaryQuantizer scalarQuantizer) + throws IOException { + byte[] vector = new byte[BQVectorUtils.discretize(fieldData.fieldInfo.getVectorDimension(), 64) / 8]; + int correctionsCount = scalarQuantizer.getSimilarity() != EUCLIDEAN ? 
3 : 2; + final ByteBuffer correctionsBuffer = ByteBuffer.allocate(Float.BYTES * correctionsCount).order(ByteOrder.LITTLE_ENDIAN); + for (int ordinal : ordMap) { + float[] v = fieldData.getVectors().get(ordinal); + float[] corrections = scalarQuantizer.quantizeForIndex(v, vector, clusterCenter); + binarizedVectorData.writeBytes(vector, vector.length); + for (int i = 0; i < corrections.length; i++) { + correctionsBuffer.putFloat(corrections[i]); + } + binarizedVectorData.writeBytes(correctionsBuffer.array(), correctionsBuffer.array().length); + correctionsBuffer.rewind(); + } + } + + private void writeMeta( + FieldInfo field, + int maxDoc, + long vectorDataOffset, + long vectorDataLength, + float[] clusterCenter, + float centroidDp, + DocsWithFieldSet docsWithField + ) throws IOException { + meta.writeInt(field.number); + meta.writeInt(field.getVectorEncoding().ordinal()); + meta.writeInt(field.getVectorSimilarityFunction().ordinal()); + meta.writeVInt(field.getVectorDimension()); + meta.writeVLong(vectorDataOffset); + meta.writeVLong(vectorDataLength); + int count = docsWithField.cardinality(); + meta.writeVInt(count); + if (count > 0) { + final ByteBuffer buffer = ByteBuffer.allocate(field.getVectorDimension() * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + buffer.asFloatBuffer().put(clusterCenter); + meta.writeBytes(buffer.array(), buffer.array().length); + meta.writeInt(Float.floatToIntBits(centroidDp)); + } + OrdToDocDISIReaderConfiguration.writeStoredMeta( + DIRECT_MONOTONIC_BLOCK_SHIFT, + meta, + binarizedVectorData, + count, + maxDoc, + docsWithField + ); + } + + @Override + public void finish() throws IOException { + if (finished) { + throw new IllegalStateException("already finished"); + } + finished = true; + rawVectorDelegate.finish(); + if (meta != null) { + // write end of fields marker + meta.writeInt(-1); + CodecUtil.writeFooter(meta); + } + if (binarizedVectorData != null) { + CodecUtil.writeFooter(binarizedVectorData); + } + } + + @Override + 
public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException { + if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) { + final float[] centroid; + final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()]; + int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid); + // Don't need access to the random vectors, we can just use the merged + rawVectorDelegate.mergeOneField(fieldInfo, mergeState); + centroid = mergedCentroid; + if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) { + segmentWriteState.infoStream.message(BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount); + } + int descritizedDimension = BQVectorUtils.discretize(fieldInfo.getVectorDimension(), 64); + FloatVectorValues floatVectorValues = KnnVectorsWriter.MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState); + if (fieldInfo.getVectorSimilarityFunction() == COSINE) { + floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues); + } + BinarizedFloatVectorValues binarizedVectorValues = new BinarizedFloatVectorValues( + floatVectorValues, + new BinaryQuantizer(fieldInfo.getVectorDimension(), descritizedDimension, fieldInfo.getVectorSimilarityFunction()), + centroid + ); + long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); + DocsWithFieldSet docsWithField = writeBinarizedVectorData(binarizedVectorData, binarizedVectorValues); + long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset; + float centroidDp = docsWithField.cardinality() > 0 ? 
VectorUtil.dotProduct(centroid, centroid) : 0; + writeMeta( + fieldInfo, + segmentWriteState.segmentInfo.maxDoc(), + vectorDataOffset, + vectorDataLength, + centroid, + centroidDp, + docsWithField + ); + } else { + rawVectorDelegate.mergeOneField(fieldInfo, mergeState); + } + } + + static DocsWithFieldSet writeBinarizedVectorAndQueryData( + IndexOutput binarizedVectorData, + IndexOutput binarizedQueryData, + FloatVectorValues floatVectorValues, + float[] centroid, + BinaryQuantizer binaryQuantizer + ) throws IOException { + DocsWithFieldSet docsWithField = new DocsWithFieldSet(); + byte[] toIndex = new byte[BQVectorUtils.discretize(floatVectorValues.dimension(), 64) / 8]; + byte[] toQuery = new byte[(BQVectorUtils.discretize(floatVectorValues.dimension(), 64) / 8) * BQSpaceUtils.B_QUERY]; + int queryCorrectionCount = binaryQuantizer.getSimilarity() != EUCLIDEAN ? 5 : 3; + final ByteBuffer queryCorrectionsBuffer = ByteBuffer.allocate(Float.BYTES * queryCorrectionCount + Short.BYTES) + .order(ByteOrder.LITTLE_ENDIAN); + for (int docV = floatVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = floatVectorValues.nextDoc()) { + // write index vector + BinaryQuantizer.QueryAndIndexResults r = binaryQuantizer.quantizeQueryAndIndex( + floatVectorValues.vectorValue(), + toIndex, + toQuery, + centroid + ); + binarizedVectorData.writeBytes(toIndex, toIndex.length); + float[] corrections = r.indexFeatures(); + for (int i = 0; i < corrections.length; i++) { + binarizedVectorData.writeInt(Float.floatToIntBits(corrections[i])); + } + docsWithField.add(docV); + + // write query vector + binarizedQueryData.writeBytes(toQuery, toQuery.length); + BinaryQuantizer.QueryFactors factors = r.queryFeatures(); + queryCorrectionsBuffer.putFloat(factors.distToC()); + queryCorrectionsBuffer.putFloat(factors.lower()); + queryCorrectionsBuffer.putFloat(factors.width()); + + if (binaryQuantizer.getSimilarity() != EUCLIDEAN) { + queryCorrectionsBuffer.putFloat(factors.normVmC()); + 
queryCorrectionsBuffer.putFloat(factors.vDotC()); + } + // ensure we are positive and fit within an unsigned short value. + assert factors.quantizedSum() >= 0 && factors.quantizedSum() <= 0xffff; + queryCorrectionsBuffer.putShort((short) factors.quantizedSum()); + + binarizedQueryData.writeBytes(queryCorrectionsBuffer.array(), queryCorrectionsBuffer.array().length); + queryCorrectionsBuffer.rewind(); + } + return docsWithField; + } + + static DocsWithFieldSet writeBinarizedVectorData(IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) + throws IOException { + DocsWithFieldSet docsWithField = new DocsWithFieldSet(); + for (int docV = binarizedByteVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = binarizedByteVectorValues.nextDoc()) { + // write vector + byte[] binaryValue = binarizedByteVectorValues.vectorValue(); + output.writeBytes(binaryValue, binaryValue.length); + float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(); + for (int i = 0; i < corrections.length; i++) { + output.writeInt(Float.floatToIntBits(corrections[i])); + } + docsWithField.add(docV); + } + return docsWithField; + } + + @Override + public CloseableRandomVectorScorerSupplier mergeOneFieldToIndex(FieldInfo fieldInfo, MergeState mergeState) throws IOException { + if (fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32)) { + final float[] centroid; + final float cDotC; + final float[] mergedCentroid = new float[fieldInfo.getVectorDimension()]; + int vectorCount = mergeAndRecalculateCentroids(mergeState, fieldInfo, mergedCentroid); + + // Don't need access to the random vectors, we can just use the merged + rawVectorDelegate.mergeOneField(fieldInfo, mergeState); + centroid = mergedCentroid; + cDotC = vectorCount > 0 ? 
VectorUtil.dotProduct(centroid, centroid) : 0; + if (segmentWriteState.infoStream.isEnabled(BINARIZED_VECTOR_COMPONENT)) { + segmentWriteState.infoStream.message(BINARIZED_VECTOR_COMPONENT, "Vectors' count:" + vectorCount); + } + return mergeOneFieldToIndex(segmentWriteState, fieldInfo, mergeState, centroid, cDotC); + } + return rawVectorDelegate.mergeOneFieldToIndex(fieldInfo, mergeState); + } + + private CloseableRandomVectorScorerSupplier mergeOneFieldToIndex( + SegmentWriteState segmentWriteState, + FieldInfo fieldInfo, + MergeState mergeState, + float[] centroid, + float cDotC + ) throws IOException { + long vectorDataOffset = binarizedVectorData.alignFilePointer(Float.BYTES); + final IndexOutput tempQuantizedVectorData = segmentWriteState.directory.createTempOutput( + binarizedVectorData.getName(), + "temp", + segmentWriteState.context + ); + final IndexOutput tempScoreQuantizedVectorData = segmentWriteState.directory.createTempOutput( + binarizedVectorData.getName(), + "score_temp", + segmentWriteState.context + ); + IndexInput binarizedDataInput = null; + IndexInput binarizedScoreDataInput = null; + boolean success = false; + int descritizedDimension = BQVectorUtils.discretize(fieldInfo.getVectorDimension(), 64); + BinaryQuantizer quantizer = new BinaryQuantizer( + fieldInfo.getVectorDimension(), + descritizedDimension, + fieldInfo.getVectorSimilarityFunction() + ); + try { + FloatVectorValues floatVectorValues = KnnVectorsWriter.MergedVectorValues.mergeFloatVectorValues(fieldInfo, mergeState); + if (fieldInfo.getVectorSimilarityFunction() == COSINE) { + floatVectorValues = new NormalizedFloatVectorValues(floatVectorValues); + } + DocsWithFieldSet docsWithField = writeBinarizedVectorAndQueryData( + tempQuantizedVectorData, + tempScoreQuantizedVectorData, + floatVectorValues, + centroid, + quantizer + ); + CodecUtil.writeFooter(tempQuantizedVectorData); + IOUtils.close(tempQuantizedVectorData); + binarizedDataInput = 
segmentWriteState.directory.openInput(tempQuantizedVectorData.getName(), segmentWriteState.context); + binarizedVectorData.copyBytes(binarizedDataInput, binarizedDataInput.length() - CodecUtil.footerLength()); + long vectorDataLength = binarizedVectorData.getFilePointer() - vectorDataOffset; + CodecUtil.retrieveChecksum(binarizedDataInput); + CodecUtil.writeFooter(tempScoreQuantizedVectorData); + IOUtils.close(tempScoreQuantizedVectorData); + binarizedScoreDataInput = segmentWriteState.directory.openInput( + tempScoreQuantizedVectorData.getName(), + segmentWriteState.context + ); + writeMeta( + fieldInfo, + segmentWriteState.segmentInfo.maxDoc(), + vectorDataOffset, + vectorDataLength, + centroid, + cDotC, + docsWithField + ); + success = true; + final IndexInput finalBinarizedDataInput = binarizedDataInput; + final IndexInput finalBinarizedScoreDataInput = binarizedScoreDataInput; + OffHeapBinarizedVectorValues vectorValues = new OffHeapBinarizedVectorValues.DenseOffHeapVectorValues( + fieldInfo.getVectorDimension(), + docsWithField.cardinality(), + centroid, + cDotC, + quantizer, + fieldInfo.getVectorSimilarityFunction(), + vectorsScorer, + finalBinarizedDataInput + ); + RandomVectorScorerSupplier scorerSupplier = vectorsScorer.getRandomVectorScorerSupplier( + fieldInfo.getVectorSimilarityFunction(), + new OffHeapBinarizedQueryVectorValues( + finalBinarizedScoreDataInput, + fieldInfo.getVectorDimension(), + docsWithField.cardinality(), + fieldInfo.getVectorSimilarityFunction() + ), + vectorValues + ); + return new BinarizedCloseableRandomVectorScorerSupplier(scorerSupplier, vectorValues, () -> { + IOUtils.close(finalBinarizedDataInput, finalBinarizedScoreDataInput); + IOUtils.deleteFilesIgnoringExceptions( + segmentWriteState.directory, + tempQuantizedVectorData.getName(), + tempScoreQuantizedVectorData.getName() + ); + }); + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException( + tempQuantizedVectorData, + tempScoreQuantizedVectorData, + 
binarizedDataInput, + binarizedScoreDataInput + ); + IOUtils.deleteFilesIgnoringExceptions( + segmentWriteState.directory, + tempQuantizedVectorData.getName(), + tempScoreQuantizedVectorData.getName() + ); + } + } + } + + @Override + public void close() throws IOException { + IOUtils.close(meta, binarizedVectorData, rawVectorDelegate); + } + + static float[] getCentroid(KnnVectorsReader vectorsReader, String fieldName) { + if (vectorsReader instanceof PerFieldKnnVectorsFormat.FieldsReader candidateReader) { + vectorsReader = candidateReader.getFieldReader(fieldName); + } + if (vectorsReader instanceof ES816BinaryQuantizedVectorsReader reader) { + return reader.getCentroid(fieldName); + } + return null; + } + + static int mergeAndRecalculateCentroids(MergeState mergeState, FieldInfo fieldInfo, float[] mergedCentroid) throws IOException { + boolean recalculate = false; + int totalVectorCount = 0; + for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) { + KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i]; + if (knnVectorsReader == null || knnVectorsReader.getFloatVectorValues(fieldInfo.name) == null) { + continue; + } + float[] centroid = getCentroid(knnVectorsReader, fieldInfo.name); + int vectorCount = knnVectorsReader.getFloatVectorValues(fieldInfo.name).size(); + if (vectorCount == 0) { + continue; + } + totalVectorCount += vectorCount; + // If there aren't centroids, or previously clustered with more than one cluster + // or if there are deleted docs, we must recalculate the centroid + if (centroid == null || mergeState.liveDocs[i] != null) { + recalculate = true; + break; + } + for (int j = 0; j < centroid.length; j++) { + mergedCentroid[j] += centroid[j] * vectorCount; + } + } + if (recalculate) { + return calculateCentroid(mergeState, fieldInfo, mergedCentroid); + } else { + for (int j = 0; j < mergedCentroid.length; j++) { + mergedCentroid[j] = mergedCentroid[j] / totalVectorCount; + } + if (fieldInfo.getVectorSimilarityFunction() 
== COSINE) { + VectorUtil.l2normalize(mergedCentroid); + } + return totalVectorCount; + } + } + + static int calculateCentroid(MergeState mergeState, FieldInfo fieldInfo, float[] centroid) throws IOException { + assert fieldInfo.getVectorEncoding().equals(VectorEncoding.FLOAT32); + // clear out the centroid + Arrays.fill(centroid, 0); + int count = 0; + for (int i = 0; i < mergeState.knnVectorsReaders.length; i++) { + KnnVectorsReader knnVectorsReader = mergeState.knnVectorsReaders[i]; + if (knnVectorsReader == null) continue; + FloatVectorValues vectorValues = mergeState.knnVectorsReaders[i].getFloatVectorValues(fieldInfo.name); + if (vectorValues == null) { + continue; + } + for (int doc = vectorValues.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = vectorValues.nextDoc()) { + float[] vector = vectorValues.vectorValue(); + // TODO Panama sum + for (int j = 0; j < vector.length; j++) { + centroid[j] += vector[j]; + } + } + count += vectorValues.size(); + } + if (count == 0) { + return count; + } + // TODO Panama div + for (int i = 0; i < centroid.length; i++) { + centroid[i] /= count; + } + if (fieldInfo.getVectorSimilarityFunction() == COSINE) { + VectorUtil.l2normalize(centroid); + } + return count; + } + + @Override + public long ramBytesUsed() { + long total = SHALLOW_RAM_BYTES_USED; + for (FieldWriter field : fields) { + // the field tracks the delegate field usage + total += field.ramBytesUsed(); + } + return total; + } + + static class FieldWriter extends FlatFieldVectorsWriter { + private static final long SHALLOW_SIZE = shallowSizeOfInstance(FieldWriter.class); + private final FieldInfo fieldInfo; + private boolean finished; + private final FlatFieldVectorsWriter flatFieldVectorsWriter; + private final float[] dimensionSums; + private final FloatArrayList magnitudes = new FloatArrayList(); + + FieldWriter(FieldInfo fieldInfo, FlatFieldVectorsWriter flatFieldVectorsWriter) { + this.fieldInfo = fieldInfo; + this.flatFieldVectorsWriter = 
flatFieldVectorsWriter; + this.dimensionSums = new float[fieldInfo.getVectorDimension()]; + } + + @Override + public List getVectors() { + return flatFieldVectorsWriter.getVectors(); + } + + public void normalizeVectors() { + for (int i = 0; i < flatFieldVectorsWriter.getVectors().size(); i++) { + float[] vector = flatFieldVectorsWriter.getVectors().get(i); + float magnitude = magnitudes.get(i); + for (int j = 0; j < vector.length; j++) { + vector[j] /= magnitude; + } + } + } + + @Override + public DocsWithFieldSet getDocsWithFieldSet() { + return flatFieldVectorsWriter.getDocsWithFieldSet(); + } + + @Override + public void finish() throws IOException { + if (finished) { + return; + } + assert flatFieldVectorsWriter.isFinished(); + finished = true; + } + + @Override + public boolean isFinished() { + return finished && flatFieldVectorsWriter.isFinished(); + } + + @Override + public void addValue(int docID, float[] vectorValue) throws IOException { + flatFieldVectorsWriter.addValue(docID, vectorValue); + if (fieldInfo.getVectorSimilarityFunction() == COSINE) { + float dp = VectorUtil.dotProduct(vectorValue, vectorValue); + float divisor = (float) Math.sqrt(dp); + magnitudes.add(divisor); + for (int i = 0; i < vectorValue.length; i++) { + dimensionSums[i] += (vectorValue[i] / divisor); + } + } else { + for (int i = 0; i < vectorValue.length; i++) { + dimensionSums[i] += vectorValue[i]; + } + } + } + + @Override + public float[] copyValue(float[] vectorValue) { + throw new UnsupportedOperationException(); + } + + @Override + public long ramBytesUsed() { + long size = SHALLOW_SIZE; + size += flatFieldVectorsWriter.ramBytesUsed(); + size += RamUsageEstimator.sizeOf(dimensionSums); + size += magnitudes.ramBytesUsed(); + return size; + } + } + + // When accessing vectorValue method, targerOrd here means a row ordinal. 
+ static class OffHeapBinarizedQueryVectorValues { + private final IndexInput slice; + private final int dimension; + private final int size; + protected final byte[] binaryValue; + protected final ByteBuffer byteBuffer; + private final int byteSize; + protected final float[] correctiveValues; + private int sumQuantizationValues; + private int lastOrd = -1; + private final int correctiveValuesSize; + private final VectorSimilarityFunction vectorSimilarityFunction; + + OffHeapBinarizedQueryVectorValues(IndexInput data, int dimension, int size, VectorSimilarityFunction vectorSimilarityFunction) { + this.slice = data; + this.dimension = dimension; + this.size = size; + this.vectorSimilarityFunction = vectorSimilarityFunction; + this.correctiveValuesSize = vectorSimilarityFunction != EUCLIDEAN ? 5 : 3; + // 4x the quantized binary dimensions + int binaryDimensions = (BQVectorUtils.discretize(dimension, 64) / 8) * BQSpaceUtils.B_QUERY; + this.byteBuffer = ByteBuffer.allocate(binaryDimensions); + this.binaryValue = byteBuffer.array(); + this.correctiveValues = new float[correctiveValuesSize]; + this.byteSize = binaryDimensions + Float.BYTES * correctiveValuesSize + Short.BYTES; + } + + public float getCentroidDistance(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[0]; + } + readCorrectiveValues(targetOrd); + return correctiveValues[0]; + } + + public float getLower(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[1]; + } + readCorrectiveValues(targetOrd); + return correctiveValues[1]; + } + + public float getWidth(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[2]; + } + readCorrectiveValues(targetOrd); + return correctiveValues[2]; + } + + public float getNormVmC(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[3]; + } + readCorrectiveValues(targetOrd); + return correctiveValues[3]; + } + + public 
float getVDotC(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[4]; + } + readCorrectiveValues(targetOrd); + return correctiveValues[4]; + } + + private void readCorrectiveValues(int targetOrd) throws IOException { + // load values + vectorValue(targetOrd); + } + + public int sumQuantizedValues(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return sumQuantizationValues; + } + // load values + vectorValue(targetOrd); + return sumQuantizationValues; + } + + public int size() { + return size; + } + + public int dimension() { + return dimension; + } + + public OffHeapBinarizedQueryVectorValues copy() throws IOException { + return new OffHeapBinarizedQueryVectorValues(slice.clone(), dimension, size, vectorSimilarityFunction); + } + + public IndexInput getSlice() { + return slice; + } + + public byte[] vectorValue(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return binaryValue; + } + slice.seek((long) targetOrd * byteSize); + slice.readBytes(binaryValue, 0, binaryValue.length); + slice.readFloats(correctiveValues, 0, correctiveValuesSize); + sumQuantizationValues = Short.toUnsignedInt(slice.readShort()); + lastOrd = targetOrd; + return binaryValue; + } + } + + static class BinarizedFloatVectorValues extends BinarizedByteVectorValues { + private float[] corrections; + private final byte[] binarized; + private final float[] centroid; + private final FloatVectorValues values; + private final BinaryQuantizer quantizer; + private int lastDoc; + + BinarizedFloatVectorValues(FloatVectorValues delegate, BinaryQuantizer quantizer, float[] centroid) { + this.values = delegate; + this.quantizer = quantizer; + this.binarized = new byte[BQVectorUtils.discretize(delegate.dimension(), 64) / 8]; + this.centroid = centroid; + lastDoc = -1; + } + + @Override + public float[] getCorrectiveTerms() { + return corrections; + } + + @Override + public byte[] vectorValue() throws IOException { + return 
binarized; + } + + @Override + public int dimension() { + return values.dimension(); + } + + @Override + public int size() { + return values.size(); + } + + @Override + public int docID() { + return values.docID(); + } + + @Override + public int nextDoc() throws IOException { + int doc = values.nextDoc(); + if (doc != NO_MORE_DOCS) { + binarize(); + } + lastDoc = doc; + return doc; + } + + @Override + public int advance(int target) throws IOException { + int doc = values.advance(target); + if (doc != NO_MORE_DOCS) { + binarize(); + } + lastDoc = doc; + return doc; + } + + @Override + public VectorScorer scorer(float[] target) throws IOException { + throw new UnsupportedOperationException(); + } + + private void binarize() throws IOException { + if (lastDoc == docID()) return; + corrections = quantizer.quantizeForIndex(values.vectorValue(), binarized, centroid); + } + } + + static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { + private final RandomVectorScorerSupplier supplier; + private final RandomAccessVectorValues vectorValues; + private final Closeable onClose; + + BinarizedCloseableRandomVectorScorerSupplier( + RandomVectorScorerSupplier supplier, + RandomAccessVectorValues vectorValues, + Closeable onClose + ) { + this.supplier = supplier; + this.onClose = onClose; + this.vectorValues = vectorValues; + } + + @Override + public RandomVectorScorer scorer(int ord) throws IOException { + return supplier.scorer(ord); + } + + @Override + public RandomVectorScorerSupplier copy() throws IOException { + return supplier.copy(); + } + + @Override + public void close() throws IOException { + onClose.close(); + } + + @Override + public int totalVectorCount() { + return vectorValues.size(); + } + } + + static final class NormalizedFloatVectorValues extends FloatVectorValues { + private final FloatVectorValues values; + private final float[] normalizedVector; + int curDoc = -1; + + 
NormalizedFloatVectorValues(FloatVectorValues values) { + this.values = values; + this.normalizedVector = new float[values.dimension()]; + } + + @Override + public int dimension() { + return values.dimension(); + } + + @Override + public int size() { + return values.size(); + } + + @Override + public float[] vectorValue() { + return normalizedVector; + } + + @Override + public VectorScorer scorer(float[] query) { + throw new UnsupportedOperationException(); + } + + @Override + public int docID() { + return values.docID(); + } + + @Override + public int nextDoc() throws IOException { + curDoc = values.nextDoc(); + if (curDoc != NO_MORE_DOCS) { + System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); + } + return curDoc; + } + + @Override + public int advance(int target) throws IOException { + curDoc = values.advance(target); + if (curDoc != NO_MORE_DOCS) { + System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); + } + return curDoc; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java new file mode 100644 index 000000000000..989f88e0a785 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormat.java @@ -0,0 +1,144 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsWriter; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.search.TaskExecutor; +import org.apache.lucene.util.hnsw.HnswGraph; + +import java.io.IOException; +import java.util.concurrent.ExecutorService; + +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.DEFAULT_NUM_MERGE_WORKER; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.MAXIMUM_BEAM_WIDTH; +import static org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat.MAXIMUM_MAX_CONN; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +public class ES816HnswBinaryQuantizedVectorsFormat extends KnnVectorsFormat { + + public static final String NAME = "ES816HnswBinaryQuantizedVectorsFormat"; + + /** + * Controls how many of the 
nearest neighbor candidates are connected to the new node. Defaults to + * {@link Lucene99HnswVectorsFormat#DEFAULT_MAX_CONN}. See {@link HnswGraph} for more details. + */ + private final int maxConn; + + /** + * The number of candidate neighbors to track while searching the graph for each newly inserted + * node. Defaults to {@link Lucene99HnswVectorsFormat#DEFAULT_BEAM_WIDTH}. See {@link HnswGraph} + * for details. + */ + private final int beamWidth; + + /** The format for storing, reading, merging vectors on disk */ + private static final FlatVectorsFormat flatVectorsFormat = new ES816BinaryQuantizedVectorsFormat(); + + private final int numMergeWorkers; + private final TaskExecutor mergeExec; + + /** Constructs a format using default graph construction parameters */ + public ES816HnswBinaryQuantizedVectorsFormat() { + this(DEFAULT_MAX_CONN, DEFAULT_BEAM_WIDTH, DEFAULT_NUM_MERGE_WORKER, null); + } + + /** + * Constructs a format using the given graph construction parameters. + * + * @param maxConn the maximum number of connections to a node in the HNSW graph + * @param beamWidth the size of the queue maintained during graph construction. + */ + public ES816HnswBinaryQuantizedVectorsFormat(int maxConn, int beamWidth) { + this(maxConn, beamWidth, DEFAULT_NUM_MERGE_WORKER, null); + } + + /** + * Constructs a format using the given graph construction parameters and scalar quantization. + * + * @param maxConn the maximum number of connections to a node in the HNSW graph + * @param beamWidth the size of the queue maintained during graph construction. + * @param numMergeWorkers number of workers (threads) that will be used when doing merge. 
If + * larger than 1, a non-null {@link ExecutorService} must be passed as mergeExec + * @param mergeExec the {@link ExecutorService} that will be used by ALL vector writers that are + * generated by this format to do the merge + */ + public ES816HnswBinaryQuantizedVectorsFormat(int maxConn, int beamWidth, int numMergeWorkers, ExecutorService mergeExec) { + super(NAME); + if (maxConn <= 0 || maxConn > MAXIMUM_MAX_CONN) { + throw new IllegalArgumentException( + "maxConn must be positive and less than or equal to " + MAXIMUM_MAX_CONN + "; maxConn=" + maxConn + ); + } + if (beamWidth <= 0 || beamWidth > MAXIMUM_BEAM_WIDTH) { + throw new IllegalArgumentException( + "beamWidth must be positive and less than or equal to " + MAXIMUM_BEAM_WIDTH + "; beamWidth=" + beamWidth + ); + } + this.maxConn = maxConn; + this.beamWidth = beamWidth; + if (numMergeWorkers == 1 && mergeExec != null) { + throw new IllegalArgumentException("No executor service is needed as we'll use single thread to merge"); + } + this.numMergeWorkers = numMergeWorkers; + if (mergeExec != null) { + this.mergeExec = new TaskExecutor(mergeExec); + } else { + this.mergeExec = null; + } + } + + @Override + public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new Lucene99HnswVectorsWriter(state, maxConn, beamWidth, flatVectorsFormat.fieldsWriter(state), numMergeWorkers, mergeExec); + } + + @Override + public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException { + return new Lucene99HnswVectorsReader(state, flatVectorsFormat.fieldsReader(state)); + } + + @Override + public int getMaxDimensions(String fieldName) { + return 1024; + } + + @Override + public String toString() { + return "ES816HnswBinaryQuantizedVectorsFormat(name=ES816HnswBinaryQuantizedVectorsFormat, maxConn=" + + maxConn + + ", beamWidth=" + + beamWidth + + ", flatVectorFormat=" + + flatVectorsFormat + + ")"; + } +} diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java new file mode 100644 index 000000000000..2a3c3aca60e5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -0,0 +1,456 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.codecs.lucene90.IndexedDISI; +import org.apache.lucene.codecs.lucene95.OrdToDocDISIReaderConfiguration; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.hnsw.RandomVectorScorer; +import org.apache.lucene.util.packed.DirectMonotonicReader; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; + +/** Binarized vector values loaded from off-heap */ +public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { + + protected final int dimension; + protected final int size; + protected final int numBytes; + protected final VectorSimilarityFunction similarityFunction; + protected final FlatVectorsScorer vectorsScorer; + + protected final IndexInput slice; + protected final byte[] binaryValue; + protected final ByteBuffer byteBuffer; + protected final int byteSize; + private int lastOrd = -1; + protected final float[] correctiveValues; + protected final BinaryQuantizer binaryQuantizer; + protected final float[] centroid; + protected final float centroidDp; + private final int correctionsCount; + + OffHeapBinarizedVectorValues( + int dimension, + int size, + float[] centroid, + float centroidDp, + BinaryQuantizer quantizer, + VectorSimilarityFunction similarityFunction, + FlatVectorsScorer vectorsScorer, + IndexInput slice + ) { + this.dimension = dimension; + this.size = size; + this.similarityFunction = similarityFunction; + this.vectorsScorer = vectorsScorer; + this.slice = slice; + this.centroid = centroid; + this.centroidDp = centroidDp; + this.numBytes = 
BQVectorUtils.discretize(dimension, 64) / 8; + this.correctionsCount = similarityFunction != EUCLIDEAN ? 3 : 2; + this.correctiveValues = new float[this.correctionsCount]; + this.byteSize = numBytes + (Float.BYTES * correctionsCount); + this.byteBuffer = ByteBuffer.allocate(numBytes); + this.binaryValue = byteBuffer.array(); + this.binaryQuantizer = quantizer; + } + + @Override + public int dimension() { + return dimension; + } + + @Override + public int size() { + return size; + } + + @Override + public byte[] vectorValue(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return binaryValue; + } + slice.seek((long) targetOrd * byteSize); + slice.readBytes(byteBuffer.array(), byteBuffer.arrayOffset(), numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + lastOrd = targetOrd; + return binaryValue; + } + + @Override + public float getCentroidDP() { + return centroidDp; + } + + @Override + public float[] getCorrectiveTerms() { + return correctiveValues; + } + + @Override + public float getCentroidDistance(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[0]; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + return correctiveValues[0]; + } + + @Override + public float getVectorMagnitude(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[1]; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + return correctiveValues[1]; + } + + @Override + public float getOOQ(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[0]; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + return correctiveValues[0]; + } + + @Override + public float getNormOC(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return 
correctiveValues[1]; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + return correctiveValues[1]; + } + + @Override + public float getODotC(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues[2]; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); + return correctiveValues[2]; + } + + @Override + public BinaryQuantizer getQuantizer() { + return binaryQuantizer; + } + + @Override + public float[] getCentroid() { + return centroid; + } + + @Override + public IndexInput getSlice() { + return slice; + } + + @Override + public int getVectorByteLength() { + return numBytes; + } + + public static OffHeapBinarizedVectorValues load( + OrdToDocDISIReaderConfiguration configuration, + int dimension, + int size, + BinaryQuantizer binaryQuantizer, + VectorSimilarityFunction similarityFunction, + FlatVectorsScorer vectorsScorer, + float[] centroid, + float centroidDp, + long quantizedVectorDataOffset, + long quantizedVectorDataLength, + IndexInput vectorData + ) throws IOException { + if (configuration.isEmpty()) { + return new EmptyOffHeapVectorValues(dimension, similarityFunction, vectorsScorer); + } + assert centroid != null; + IndexInput bytesSlice = vectorData.slice("quantized-vector-data", quantizedVectorDataOffset, quantizedVectorDataLength); + if (configuration.isDense()) { + return new DenseOffHeapVectorValues( + dimension, + size, + centroid, + centroidDp, + binaryQuantizer, + similarityFunction, + vectorsScorer, + bytesSlice + ); + } else { + return new SparseOffHeapVectorValues( + configuration, + dimension, + size, + centroid, + centroidDp, + binaryQuantizer, + vectorData, + similarityFunction, + vectorsScorer, + bytesSlice + ); + } + } + + /** Dense off-heap binarized vector values */ + public static class DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { + private int doc = 
-1; + + public DenseOffHeapVectorValues( + int dimension, + int size, + float[] centroid, + float centroidDp, + BinaryQuantizer binaryQuantizer, + VectorSimilarityFunction similarityFunction, + FlatVectorsScorer vectorsScorer, + IndexInput slice + ) { + super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); + } + + @Override + public byte[] vectorValue() throws IOException { + return vectorValue(doc); + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() { + return advance(doc + 1); + } + + @Override + public int advance(int target) { + assert docID() < target; + if (target >= size) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public DenseOffHeapVectorValues copy() throws IOException { + return new DenseOffHeapVectorValues( + dimension, + size, + centroid, + centroidDp, + binaryQuantizer, + similarityFunction, + vectorsScorer, + slice.clone() + ); + } + + @Override + public Bits getAcceptOrds(Bits acceptDocs) { + return acceptDocs; + } + + @Override + public VectorScorer scorer(float[] target) throws IOException { + DenseOffHeapVectorValues copy = copy(); + RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); + return new VectorScorer() { + @Override + public float score() throws IOException { + return scorer.score(copy.doc); + } + + @Override + public DocIdSetIterator iterator() { + return copy; + } + }; + } + } + + /** Sparse off-heap binarized vector values */ + private static class SparseOffHeapVectorValues extends OffHeapBinarizedVectorValues { + private final DirectMonotonicReader ordToDoc; + private final IndexedDISI disi; + // dataIn was used to init a new IndexedDIS for #randomAccess() + private final IndexInput dataIn; + private final OrdToDocDISIReaderConfiguration configuration; + + SparseOffHeapVectorValues( + OrdToDocDISIReaderConfiguration configuration, + int dimension, + int size, 
+ float[] centroid, + float centroidDp, + BinaryQuantizer binaryQuantizer, + IndexInput dataIn, + VectorSimilarityFunction similarityFunction, + FlatVectorsScorer vectorsScorer, + IndexInput slice + ) throws IOException { + super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); + this.configuration = configuration; + this.dataIn = dataIn; + this.ordToDoc = configuration.getDirectMonotonicReader(dataIn); + this.disi = configuration.getIndexedDISI(dataIn); + } + + @Override + public byte[] vectorValue() throws IOException { + return vectorValue(disi.index()); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + assert docID() < target; + return disi.advance(target); + } + + @Override + public SparseOffHeapVectorValues copy() throws IOException { + return new SparseOffHeapVectorValues( + configuration, + dimension, + size, + centroid, + centroidDp, + binaryQuantizer, + dataIn, + similarityFunction, + vectorsScorer, + slice.clone() + ); + } + + @Override + public int ordToDoc(int ord) { + return (int) ordToDoc.get(ord); + } + + @Override + public Bits getAcceptOrds(Bits acceptDocs) { + if (acceptDocs == null) { + return null; + } + return new Bits() { + @Override + public boolean get(int index) { + return acceptDocs.get(ordToDoc(index)); + } + + @Override + public int length() { + return size; + } + }; + } + + @Override + public VectorScorer scorer(float[] target) throws IOException { + SparseOffHeapVectorValues copy = copy(); + RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); + return new VectorScorer() { + @Override + public float score() throws IOException { + return scorer.score(copy.disi.index()); + } + + @Override + public DocIdSetIterator iterator() { + return copy; + } + }; + } + } + + private 
static class EmptyOffHeapVectorValues extends OffHeapBinarizedVectorValues { + private int doc = -1; + + EmptyOffHeapVectorValues(int dimension, VectorSimilarityFunction similarityFunction, FlatVectorsScorer vectorsScorer) { + super(dimension, 0, null, Float.NaN, null, similarityFunction, vectorsScorer, null); + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() { + return advance(doc + 1); + } + + @Override + public int advance(int target) { + return doc = NO_MORE_DOCS; + } + + @Override + public byte[] vectorValue() { + throw new UnsupportedOperationException(); + } + + @Override + public DenseOffHeapVectorValues copy() { + throw new UnsupportedOperationException(); + } + + @Override + public Bits getAcceptOrds(Bits acceptDocs) { + return null; + } + + @Override + public VectorScorer scorer(float[] target) throws IOException { + return null; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java new file mode 100644 index 000000000000..2417353373ba --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java @@ -0,0 +1,70 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.util.VectorUtil; +import org.apache.lucene.util.hnsw.RandomAccessVectorValues; + +import java.io.IOException; + +/** + * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 + */ +public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVectorValues.Bytes { + /** Returns the centroid distance for the vector */ + float getCentroidDistance(int vectorOrd) throws IOException; + + /** Returns the vector magnitude for the vector */ + float getVectorMagnitude(int vectorOrd) throws IOException; + + /** Returns OOQ corrective factor for the given vector ordinal */ + float getOOQ(int targetOrd) throws IOException; + + /** + * Returns the norm of the target vector w the centroid corrective factor for the given vector + * ordinal + */ + float getNormOC(int targetOrd) throws IOException; + + /** + * Returns the target vector dot product the centroid corrective factor for the given vector + * ordinal + */ + float getODotC(int targetOrd) throws IOException; + + /** + * @return the quantizer used to quantize the vectors + */ + BinaryQuantizer getQuantizer(); + + /** + * @return coarse grained centroids for the vectors + */ + float[] getCentroid() throws IOException; + + @Override + RandomAccessBinarizedByteVectorValues copy() throws IOException; + + default float getCentroidDP() throws IOException { + // this only gets executed on-merge + float[] centroid = getCentroid(); + return 
VectorUtil.dotProduct(centroid, centroid); + } +} diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat index da2a0c4b90f3..c2201f5b1c31 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat @@ -3,3 +3,5 @@ org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat org.elasticsearch.index.codec.vectors.ES814HnswScalarQuantizedVectorsFormat org.elasticsearch.index.codec.vectors.ES815HnswBitVectorsFormat org.elasticsearch.index.codec.vectors.ES815BitFlatVectorFormat +org.elasticsearch.index.codec.vectors.ES816BinaryQuantizedVectorsFormat +org.elasticsearch.index.codec.vectors.ES816HnswBinaryQuantizedVectorsFormat diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BQVectorUtilsTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BQVectorUtilsTests.java new file mode 100644 index 000000000000..9f9114c70b6d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BQVectorUtilsTests.java @@ -0,0 +1,90 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.tests.util.LuceneTestCase; + +public class BQVectorUtilsTests extends LuceneTestCase { + + public static int popcount(byte[] a, int aOffset, byte[] b, int length) { + int res = 0; + for (int j = 0; j < length; j++) { + int value = (a[aOffset + j] & b[j]) & 0xFF; + for (int k = 0; k < Byte.SIZE; k++) { + if ((value & (1 << k)) != 0) { + ++res; + } + } + } + return res; + } + + private static float DELTA = Float.MIN_VALUE; + + public void testPadFloat() { + assertArrayEquals(new float[] { 1, 2, 3, 4 }, BQVectorUtils.pad(new float[] { 1, 2, 3, 4 }, 4), DELTA); + assertArrayEquals(new float[] { 1, 2, 3, 4 }, BQVectorUtils.pad(new float[] { 1, 2, 3, 4 }, 3), DELTA); + assertArrayEquals(new float[] { 1, 2, 3, 4, 0 }, BQVectorUtils.pad(new float[] { 1, 2, 3, 4 }, 5), DELTA); + } + + public void testPadByte() { + assertArrayEquals(new byte[] { 1, 2, 3, 4 }, BQVectorUtils.pad(new byte[] { 1, 2, 3, 4 }, 4)); + assertArrayEquals(new byte[] { 1, 2, 3, 4 }, BQVectorUtils.pad(new byte[] { 1, 2, 3, 4 }, 3)); + assertArrayEquals(new byte[] { 1, 2, 3, 4, 0 }, BQVectorUtils.pad(new byte[] { 1, 2, 3, 4 }, 5)); + } + + public void testPopCount() { + assertEquals(0, BQVectorUtils.popcount(new byte[] {})); + assertEquals(1, BQVectorUtils.popcount(new byte[] { 1 })); + assertEquals(2, BQVectorUtils.popcount(new byte[] { 2, 1 })); + assertEquals(2, BQVectorUtils.popcount(new byte[] { 8, 0, 1 })); + assertEquals(4, BQVectorUtils.popcount(new byte[] { 7, 1 })); + + int iterations = atLeast(50); + for (int i = 0; i < iterations; i++) { + int size = random().nextInt(5000); + var a = new byte[size]; + random().nextBytes(a); + assertEquals(popcount(a, 0, a, size), BQVectorUtils.popcount(a)); + } + } + + public void testNorm() { + assertEquals(3.0f, 
BQVectorUtils.norm(new float[] { 3 }), DELTA); + assertEquals(5.0f, BQVectorUtils.norm(new float[] { 5 }), DELTA); + assertEquals(4.0f, BQVectorUtils.norm(new float[] { 2, 2, 2, 2 }), DELTA); + assertEquals(9.0f, BQVectorUtils.norm(new float[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }), DELTA); + } + + public void testSubtract() { + assertArrayEquals(new float[] { 1 }, BQVectorUtils.subtract(new float[] { 3 }, new float[] { 2 }), DELTA); + assertArrayEquals(new float[] { 2, 1, 0 }, BQVectorUtils.subtract(new float[] { 3, 3, 3 }, new float[] { 1, 2, 3 }), DELTA); + } + + public void testSubtractInPlace() { + var a = new float[] { 3 }; + BQVectorUtils.subtractInPlace(a, new float[] { 2 }); + assertArrayEquals(new float[] { 1 }, a, DELTA); + + a = new float[] { 3, 3, 3 }; + BQVectorUtils.subtractInPlace(a, new float[] { 1, 2, 3 }); + assertArrayEquals(new float[] { 2, 1, 0 }, a, DELTA); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java new file mode 100644 index 000000000000..32d717bd76f9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BinaryQuantizationTests.java @@ -0,0 +1,1856 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.util.VectorUtil; + +import java.util.Random; + +public class BinaryQuantizationTests extends LuceneTestCase { + + public void testQuantizeForIndex() { + int dimensions = random().nextInt(1, 4097); + int discretizedDimensions = BQVectorUtils.discretize(dimensions, 64); + + int randIdx = random().nextInt(VectorSimilarityFunction.values().length); + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.values()[randIdx]; + + BinaryQuantizer quantizer = new BinaryQuantizer(discretizedDimensions, similarityFunction); + + float[] centroid = new float[dimensions]; + for (int i = 0; i < dimensions; i++) { + centroid[i] = random().nextFloat(-50f, 50f); + } + + float[] vector = new float[dimensions]; + for (int i = 0; i < dimensions; i++) { + vector[i] = random().nextFloat(-50f, 50f); + } + if (similarityFunction == VectorSimilarityFunction.COSINE) { + VectorUtil.l2normalize(vector); + VectorUtil.l2normalize(centroid); + } + + byte[] destination = new byte[discretizedDimensions / 8]; + float[] corrections = quantizer.quantizeForIndex(vector, destination, centroid); + + for (float correction : corrections) { + assertFalse(Float.isNaN(correction)); + } + + if (similarityFunction != VectorSimilarityFunction.EUCLIDEAN) { + assertEquals(3, corrections.length); + assertTrue(corrections[0] >= 0); + assertTrue(corrections[1] > 0); + } else { + assertEquals(2, corrections.length); + assertTrue(corrections[0] > 0); + assertTrue(corrections[1] > 0); + } + } + + public void testQuantizeForQuery() { + int dimensions = random().nextInt(1, 4097); + int discretizedDimensions = BQVectorUtils.discretize(dimensions, 64); + + 
int randIdx = random().nextInt(VectorSimilarityFunction.values().length); + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.values()[randIdx]; + + BinaryQuantizer quantizer = new BinaryQuantizer(discretizedDimensions, similarityFunction); + + float[] centroid = new float[dimensions]; + for (int i = 0; i < dimensions; i++) { + centroid[i] = random().nextFloat(-50f, 50f); + } + + float[] vector = new float[dimensions]; + for (int i = 0; i < dimensions; i++) { + vector[i] = random().nextFloat(-50f, 50f); + } + if (similarityFunction == VectorSimilarityFunction.COSINE) { + VectorUtil.l2normalize(vector); + VectorUtil.l2normalize(centroid); + } + float cDotC = VectorUtil.dotProduct(centroid, centroid); + byte[] destination = new byte[discretizedDimensions / 8 * BQSpaceUtils.B_QUERY]; + BinaryQuantizer.QueryFactors corrections = quantizer.quantizeForQuery(vector, destination, centroid); + + if (similarityFunction != VectorSimilarityFunction.EUCLIDEAN) { + int sumQ = corrections.quantizedSum(); + float distToC = corrections.distToC(); + float lower = corrections.lower(); + float width = corrections.width(); + float normVmC = corrections.normVmC(); + float vDotC = corrections.vDotC(); + assertTrue(sumQ >= 0); + assertTrue(distToC >= 0); + assertFalse(Float.isNaN(lower)); + assertTrue(width >= 0); + assertTrue(normVmC >= 0); + assertFalse(Float.isNaN(vDotC)); + assertTrue(cDotC >= 0); + } else { + int sumQ = corrections.quantizedSum(); + float distToC = corrections.distToC(); + float lower = corrections.lower(); + float width = corrections.width(); + assertTrue(sumQ >= 0); + assertTrue(distToC >= 0); + assertFalse(Float.isNaN(lower)); + assertTrue(width >= 0); + assertEquals(corrections.normVmC(), 0.0f, 0.01f); + assertEquals(corrections.vDotC(), 0.0f, 0.01f); + } + } + + public void testQuantizeForIndexEuclidean() { + int dimensions = 128; + + BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, VectorSimilarityFunction.EUCLIDEAN); + float[] 
vector = new float[] { + 0f, + 0.0f, + 16.0f, + 35.0f, + 5.0f, + 32.0f, + 31.0f, + 14.0f, + 10.0f, + 11.0f, + 78.0f, + 55.0f, + 10.0f, + 45.0f, + 83.0f, + 11.0f, + 6.0f, + 14.0f, + 57.0f, + 102.0f, + 75.0f, + 20.0f, + 8.0f, + 3.0f, + 5.0f, + 67.0f, + 17.0f, + 19.0f, + 26.0f, + 5.0f, + 0.0f, + 1.0f, + 22.0f, + 60.0f, + 26.0f, + 7.0f, + 1.0f, + 18.0f, + 22.0f, + 84.0f, + 53.0f, + 85.0f, + 119.0f, + 119.0f, + 4.0f, + 24.0f, + 18.0f, + 7.0f, + 7.0f, + 1.0f, + 81.0f, + 106.0f, + 102.0f, + 72.0f, + 30.0f, + 6.0f, + 0.0f, + 9.0f, + 1.0f, + 9.0f, + 119.0f, + 72.0f, + 1.0f, + 4.0f, + 33.0f, + 119.0f, + 29.0f, + 6.0f, + 1.0f, + 0.0f, + 1.0f, + 14.0f, + 52.0f, + 119.0f, + 30.0f, + 3.0f, + 0.0f, + 0.0f, + 55.0f, + 92.0f, + 111.0f, + 2.0f, + 5.0f, + 4.0f, + 9.0f, + 22.0f, + 89.0f, + 96.0f, + 14.0f, + 1.0f, + 0.0f, + 1.0f, + 82.0f, + 59.0f, + 16.0f, + 20.0f, + 5.0f, + 25.0f, + 14.0f, + 11.0f, + 4.0f, + 0.0f, + 0.0f, + 1.0f, + 26.0f, + 47.0f, + 23.0f, + 4.0f, + 0.0f, + 0.0f, + 4.0f, + 38.0f, + 83.0f, + 30.0f, + 14.0f, + 9.0f, + 4.0f, + 9.0f, + 17.0f, + 23.0f, + 41.0f, + 0.0f, + 0.0f, + 2.0f, + 8.0f, + 19.0f, + 25.0f, + 23.0f }; + byte[] destination = new byte[dimensions / 8]; + float[] centroid = new float[] { + 27.054054f, + 22.252253f, + 25.027027f, + 23.55856f, + 31.099098f, + 28.765766f, + 31.64865f, + 30.981981f, + 24.675676f, + 21.81982f, + 26.72973f, + 25.486486f, + 30.504505f, + 35.216217f, + 28.306307f, + 24.486486f, + 29.675676f, + 26.153152f, + 31.315315f, + 25.225225f, + 29.234234f, + 30.855856f, + 24.495495f, + 29.828829f, + 31.54955f, + 24.36937f, + 25.108109f, + 24.873875f, + 22.918919f, + 24.918919f, + 29.027027f, + 25.513514f, + 27.64865f, + 28.405405f, + 23.603603f, + 17.900902f, + 22.522522f, + 24.855856f, + 31.396397f, + 32.585587f, + 26.297297f, + 27.468468f, + 19.675676f, + 19.018019f, + 24.801802f, + 30.27928f, + 27.945946f, + 25.324324f, + 29.918919f, + 27.864864f, + 28.081081f, + 23.45946f, + 28.828829f, + 28.387388f, + 25.387388f, + 27.90991f, + 
25.621622f, + 21.585585f, + 26.378378f, + 24.144144f, + 21.666666f, + 22.72973f, + 26.837837f, + 22.747747f, + 29.0f, + 28.414415f, + 24.612612f, + 21.594595f, + 19.117117f, + 24.045046f, + 30.612612f, + 27.55856f, + 25.117117f, + 27.783783f, + 21.639639f, + 19.36937f, + 21.252253f, + 29.153152f, + 29.216217f, + 24.747747f, + 28.252253f, + 25.288288f, + 25.738739f, + 23.44144f, + 24.423424f, + 23.693693f, + 26.306307f, + 29.162163f, + 28.684685f, + 34.648647f, + 25.576576f, + 25.288288f, + 29.63063f, + 20.225225f, + 25.72973f, + 29.009008f, + 28.666666f, + 29.243244f, + 26.36937f, + 25.864864f, + 21.522522f, + 21.414415f, + 25.963964f, + 26.054054f, + 25.099098f, + 30.477478f, + 29.55856f, + 24.837837f, + 24.801802f, + 21.18018f, + 24.027027f, + 26.360361f, + 33.153152f, + 29.135136f, + 30.486486f, + 28.639639f, + 27.576576f, + 24.486486f, + 26.297297f, + 21.774775f, + 25.936937f, + 35.36937f, + 25.171171f, + 30.405405f, + 31.522522f, + 29.765766f, + 22.324324f, + 26.09009f }; + float[] corrections = quantizer.quantizeForIndex(vector, destination, centroid); + + assertEquals(2, corrections.length); + float distToCentroid = corrections[0]; + float magnitude = corrections[1]; + + assertEquals(387.90204f, distToCentroid, 0.0003f); + assertEquals(0.75916624f, magnitude, 0.0000001f); + assertArrayEquals(new byte[] { 20, 54, 56, 72, 97, -16, 62, 12, -32, -29, -125, 12, 0, -63, -63, -126 }, destination); + } + + public void testQuantizeForQueryEuclidean() { + int dimensions = 128; + + BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, VectorSimilarityFunction.EUCLIDEAN); + float[] vector = new float[] { + 0.0f, + 8.0f, + 69.0f, + 45.0f, + 2.0f, + 0f, + 16.0f, + 52.0f, + 32.0f, + 13.0f, + 2.0f, + 6.0f, + 34.0f, + 49.0f, + 45.0f, + 83.0f, + 6.0f, + 2.0f, + 26.0f, + 57.0f, + 14.0f, + 46.0f, + 19.0f, + 9.0f, + 4.0f, + 13.0f, + 53.0f, + 104.0f, + 33.0f, + 11.0f, + 25.0f, + 19.0f, + 30.0f, + 10.0f, + 7.0f, + 2.0f, + 8.0f, + 7.0f, + 25.0f, + 1.0f, + 2.0f, + 25.0f, + 
24.0f, + 28.0f, + 61.0f, + 83.0f, + 41.0f, + 9.0f, + 14.0f, + 3.0f, + 7.0f, + 114.0f, + 114.0f, + 114.0f, + 114.0f, + 5.0f, + 5.0f, + 1.0f, + 5.0f, + 114.0f, + 73.0f, + 75.0f, + 106.0f, + 3.0f, + 5.0f, + 6.0f, + 6.0f, + 8.0f, + 15.0f, + 45.0f, + 2.0f, + 15.0f, + 7.0f, + 114.0f, + 103.0f, + 6.0f, + 5.0f, + 4.0f, + 9.0f, + 67.0f, + 47.0f, + 22.0f, + 32.0f, + 27.0f, + 41.0f, + 10.0f, + 114.0f, + 36.0f, + 43.0f, + 42.0f, + 23.0f, + 9.0f, + 7.0f, + 30.0f, + 114.0f, + 19.0f, + 7.0f, + 5.0f, + 6.0f, + 6.0f, + 21.0f, + 48.0f, + 2.0f, + 1.0f, + 0.0f, + 8.0f, + 114.0f, + 13.0f, + 0.0f, + 1.0f, + 53.0f, + 83.0f, + 14.0f, + 8.0f, + 16.0f, + 12.0f, + 16.0f, + 20.0f, + 27.0f, + 87.0f, + 45.0f, + 50.0f, + 15.0f, + 5.0f, + 5.0f, + 6.0f, + 32.0f, + 49.0f }; + byte[] destination = new byte[dimensions / 8 * BQSpaceUtils.B_QUERY]; + float[] centroid = new float[] { + 26.7f, + 16.2f, + 10.913f, + 10.314f, + 12.12f, + 14.045f, + 15.887f, + 16.864f, + 32.232f, + 31.567f, + 34.922f, + 21.624f, + 16.349f, + 29.625f, + 31.994f, + 22.044f, + 37.847f, + 24.622f, + 36.299f, + 27.966f, + 14.368f, + 19.248f, + 30.778f, + 35.927f, + 27.019f, + 16.381f, + 17.325f, + 16.517f, + 13.272f, + 9.154f, + 9.242f, + 17.995f, + 53.777f, + 23.011f, + 12.929f, + 16.128f, + 22.16f, + 28.643f, + 25.861f, + 27.197f, + 59.883f, + 40.878f, + 34.153f, + 22.795f, + 24.402f, + 37.427f, + 34.19f, + 29.288f, + 61.812f, + 26.355f, + 39.071f, + 37.789f, + 23.33f, + 22.299f, + 28.64f, + 47.828f, + 52.457f, + 21.442f, + 24.039f, + 29.781f, + 27.707f, + 19.484f, + 14.642f, + 28.757f, + 54.567f, + 20.936f, + 25.112f, + 25.521f, + 22.077f, + 18.272f, + 14.526f, + 29.054f, + 61.803f, + 24.509f, + 37.517f, + 35.906f, + 24.106f, + 22.64f, + 32.1f, + 48.788f, + 60.102f, + 39.625f, + 34.766f, + 22.497f, + 24.397f, + 41.599f, + 38.419f, + 30.99f, + 55.647f, + 25.115f, + 14.96f, + 18.882f, + 26.918f, + 32.442f, + 26.231f, + 27.107f, + 26.828f, + 15.968f, + 18.668f, + 14.071f, + 10.906f, + 8.989f, + 9.721f, + 17.294f, + 36.32f, + 
21.854f, + 35.509f, + 27.106f, + 14.067f, + 19.82f, + 33.582f, + 35.997f, + 33.528f, + 30.369f, + 36.955f, + 21.23f, + 15.2f, + 30.252f, + 34.56f, + 22.295f, + 29.413f, + 16.576f, + 11.226f, + 10.754f, + 12.936f, + 15.525f, + 15.868f, + 16.43f }; + BinaryQuantizer.QueryFactors corrections = quantizer.quantizeForQuery(vector, destination, centroid); + + int sumQ = corrections.quantizedSum(); + float lower = corrections.lower(); + float width = corrections.width(); + + assertEquals(729, sumQ); + assertEquals(-57.883f, lower, 0.001f); + assertEquals(9.972266f, width, 0.000001f); + assertArrayEquals( + new byte[] { + -77, + -49, + 73, + -17, + -89, + 9, + -43, + -27, + 40, + 15, + 42, + 76, + -122, + 38, + -22, + -37, + -96, + 111, + -63, + -102, + -123, + 23, + 110, + 127, + 32, + 95, + 29, + 106, + -120, + -121, + -32, + -94, + 78, + -98, + 42, + 95, + 122, + 114, + 30, + 18, + 91, + 97, + -5, + -9, + 123, + 122, + 31, + -66, + 49, + 1, + 20, + 48, + 0, + 12, + 30, + 30, + 4, + 96, + 2, + 2, + 4, + 33, + 1, + 65 }, + destination + ); + } + + private float[] generateRandomFloatArray(Random random, int dimensions, float lowerBoundInclusive, float upperBoundExclusive) { + float[] data = new float[dimensions]; + for (int i = 0; i < dimensions; i++) { + data[i] = random.nextFloat(lowerBoundInclusive, upperBoundExclusive); + } + return data; + } + + public void testQuantizeForIndexMIP() { + int dimensions = 768; + + // we want fixed values for these arrays so define our own random generation here to track + // quantization changes + Random random = new Random(42); + + float[] mipVectorToIndex = generateRandomFloatArray(random, dimensions, -1f, 1f); + float[] mipCentroid = generateRandomFloatArray(random, dimensions, -1f, 1f); + + VectorSimilarityFunction[] similarityFunctionsActingLikeEucllidean = new VectorSimilarityFunction[] { + VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT, + VectorSimilarityFunction.DOT_PRODUCT }; + int randIdx = 
random().nextInt(similarityFunctionsActingLikeEucllidean.length); + VectorSimilarityFunction similarityFunction = similarityFunctionsActingLikeEucllidean[randIdx]; + + BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, similarityFunction); + float[] vector = mipVectorToIndex; + byte[] destination = new byte[dimensions / 8]; + float[] centroid = mipCentroid; + float[] corrections = quantizer.quantizeForIndex(vector, destination, centroid); + + assertEquals(3, corrections.length); + float ooq = corrections[0]; + float normOC = corrections[1]; + float oDotC = corrections[2]; + + assertEquals(0.8141399f, ooq, 0.0000001f); + assertEquals(21.847124f, normOC, 0.00001f); + assertEquals(6.4300356f, oDotC, 0.0001f); + assertArrayEquals( + new byte[] { + -83, + -91, + -71, + 97, + 32, + -96, + 89, + -80, + -19, + -108, + 3, + 113, + -111, + 12, + -86, + 32, + -43, + 76, + 122, + -106, + -83, + -37, + -122, + 118, + 84, + -72, + 34, + 20, + 57, + -29, + 119, + -8, + -10, + -100, + -109, + 62, + -54, + 53, + -44, + 8, + -16, + 80, + 58, + 50, + 105, + -25, + 47, + 115, + -106, + -92, + -122, + -44, + 8, + 18, + -23, + 24, + -15, + 62, + 58, + 111, + 99, + -116, + -111, + -5, + 101, + -69, + -32, + -74, + -105, + 113, + -89, + 44, + 100, + -93, + -80, + 82, + -64, + 91, + -87, + -95, + 115, + 6, + 76, + 110, + 101, + 39, + 108, + 72, + 2, + 112, + -63, + -43, + 105, + -42, + 9, + -128 }, + destination + ); + } + + public void testQuantizeForQueryMIP() { + int dimensions = 768; + + // we want fixed values for these arrays so define our own random generation here to track + // quantization changes + Random random = new Random(42); + + float[] mipVectorToQuery = generateRandomFloatArray(random, dimensions, -1f, 1f); + float[] mipCentroid = generateRandomFloatArray(random, dimensions, -1f, 1f); + + VectorSimilarityFunction[] similarityFunctionsActingLikeEucllidean = new VectorSimilarityFunction[] { + VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT, + 
VectorSimilarityFunction.DOT_PRODUCT }; + int randIdx = random().nextInt(similarityFunctionsActingLikeEucllidean.length); + VectorSimilarityFunction similarityFunction = similarityFunctionsActingLikeEucllidean[randIdx]; + + BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, similarityFunction); + float[] vector = mipVectorToQuery; + byte[] destination = new byte[dimensions / 8 * BQSpaceUtils.B_QUERY]; + float[] centroid = mipCentroid; + float cDotC = VectorUtil.dotProduct(centroid, centroid); + BinaryQuantizer.QueryFactors corrections = quantizer.quantizeForQuery(vector, destination, centroid); + + int sumQ = corrections.quantizedSum(); + float lower = corrections.lower(); + float width = corrections.width(); + float normVmC = corrections.normVmC(); + float vDotC = corrections.vDotC(); + + assertEquals(5272, sumQ); + assertEquals(-0.08603752f, lower, 0.00000001f); + assertEquals(0.011431276f, width, 0.00000001f); + assertEquals(21.847124f, normVmC, 0.00001f); + assertEquals(6.4300356f, vDotC, 0.0001f); + assertEquals(252.37146f, cDotC, 0.0001f); + assertArrayEquals( + new byte[] { + -81, + 19, + 67, + 33, + 112, + 8, + 40, + -5, + -19, + 115, + -87, + -63, + -59, + 12, + -2, + -127, + -23, + 43, + 24, + 16, + -69, + 112, + -22, + 75, + -81, + -50, + 100, + -41, + 3, + -120, + -93, + -4, + 4, + 125, + 34, + -57, + -109, + 89, + -63, + -35, + -116, + 4, + 35, + 93, + -26, + -88, + -55, + -86, + 63, + -46, + -122, + -96, + -26, + 124, + -64, + 21, + 96, + 46, + 98, + 97, + 88, + -98, + -83, + 121, + 16, + -14, + -89, + -118, + 65, + -39, + -111, + -35, + 113, + 108, + 111, + 86, + 17, + -69, + -47, + 72, + 1, + 36, + 17, + 113, + -87, + -5, + -46, + -37, + -2, + 93, + -123, + 118, + 4, + -12, + -33, + 95, + 32, + -63, + -97, + -109, + 27, + 111, + 42, + -57, + -87, + -41, + -73, + -106, + 27, + -31, + 32, + -1, + 9, + -88, + -35, + -11, + -103, + 5, + 27, + -127, + 108, + 127, + -119, + 58, + 38, + 18, + -103, + -27, + -63, + 56, + 77, + -13, + 3, + -40, + 
-127, + 37, + 82, + -87, + -26, + -45, + -14, + 18, + -50, + 76, + 25, + 37, + -12, + 106, + 17, + 115, + 0, + 23, + -109, + 26, + -110, + 17, + -35, + 111, + 4, + 60, + 58, + -64, + -104, + -125, + 23, + -58, + 89, + -117, + 104, + -71, + 3, + -89, + -26, + 46, + 15, + 82, + -83, + -75, + -72, + -69, + 20, + -38, + -47, + 109, + -66, + -66, + -89, + 108, + -122, + -3, + -69, + -85, + 18, + 59, + 85, + -97, + -114, + 95, + 2, + -84, + -77, + 121, + -6, + 10, + 110, + -13, + -123, + -34, + 106, + -71, + -107, + 123, + 67, + -111, + 58, + 52, + -53, + 87, + -113, + -21, + -44, + 26, + 10, + -62, + 56, + 111, + 36, + -126, + 26, + 94, + -88, + -13, + -113, + -50, + -9, + -115, + 84, + 8, + -32, + -102, + -4, + 89, + 29, + 75, + -73, + -19, + 22, + -90, + 76, + -61, + 4, + -48, + -100, + -11, + 107, + 20, + -39, + -98, + 123, + 77, + 104, + 9, + 9, + 91, + -105, + -40, + -106, + -87, + 38, + 48, + 60, + 29, + -68, + 124, + -78, + -63, + -101, + -115, + 67, + -17, + 101, + -53, + 121, + 44, + -78, + -12, + 110, + 91, + -83, + -92, + -72, + 96, + 32, + -96, + 89, + 48, + 76, + -124, + 3, + 113, + -111, + 12, + -86, + 32, + -43, + 68, + 106, + -122, + -84, + -37, + -124, + 118, + 84, + -72, + 34, + 20, + 57, + -29, + 119, + 56, + -10, + -108, + -109, + 60, + -56, + 37, + 84, + 8, + -16, + 80, + 24, + 50, + 41, + -25, + 47, + 115, + -122, + -92, + -126, + -44, + 8, + 18, + -23, + 24, + -15, + 60, + 58, + 111, + 99, + -120, + -111, + -21, + 101, + 59, + -32, + -74, + -105, + 113, + -90, + 36, + 100, + -93, + -80, + 82, + -64, + 91, + -87, + -95, + 115, + 6, + 76, + 110, + 101, + 39, + 44, + 0, + 2, + 112, + -64, + -47, + 105, + 2, + 1, + -128 }, + destination + ); + } + + public void testQuantizeForIndexCosine() { + int dimensions = 768; + + // we want fixed values for these arrays so define our own random generation here to track + // quantization changes + Random random = new Random(42); + + float[] mipVectorToIndex = generateRandomFloatArray(random, dimensions, -1f, 1f); 
+ float[] mipCentroid = generateRandomFloatArray(random, dimensions, -1f, 1f); + + mipVectorToIndex = VectorUtil.l2normalize(mipVectorToIndex); + mipCentroid = VectorUtil.l2normalize(mipCentroid); + + BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, VectorSimilarityFunction.COSINE); + float[] vector = mipVectorToIndex; + byte[] destination = new byte[dimensions / 8]; + float[] centroid = mipCentroid; + float[] corrections = quantizer.quantizeForIndex(vector, destination, centroid); + + assertEquals(3, corrections.length); + float ooq = corrections[0]; + float normOC = corrections[1]; + float oDotC = corrections[2]; + + assertEquals(0.8145253f, ooq, 0.000001f); + assertEquals(1.3955297f, normOC, 0.00001f); + assertEquals(0.026248248f, oDotC, 0.0001f); + assertArrayEquals( + new byte[] { + -83, + -91, + -71, + 97, + 32, + -96, + 89, + -80, + -20, + -108, + 3, + 113, + -111, + 12, + -86, + 32, + -43, + 76, + 122, + -106, + -83, + -37, + -122, + 118, + 84, + -72, + 34, + 20, + 57, + -29, + 119, + -72, + -10, + -100, + -109, + 62, + -54, + 117, + -44, + 8, + -16, + 80, + 58, + 50, + 41, + -25, + 47, + 115, + -106, + -92, + -122, + -44, + 8, + 18, + -23, + 24, + -15, + 62, + 58, + 111, + 99, + -116, + -111, + -21, + 101, + -69, + -32, + -74, + -105, + 113, + -90, + 44, + 100, + -93, + -80, + 82, + -64, + 91, + -87, + -95, + 115, + 6, + 76, + 110, + 101, + 39, + 44, + 72, + 2, + 112, + -63, + -43, + 105, + -42, + 9, + -126 }, + destination + ); + } + + public void testQuantizeForQueryCosine() { + int dimensions = 768; + + // we want fixed values for these arrays so define our own random generation here to track + // quantization changes + Random random = new Random(42); + + float[] mipVectorToQuery = generateRandomFloatArray(random, dimensions, -1f, 1f); + float[] mipCentroid = generateRandomFloatArray(random, dimensions, -1f, 1f); + + mipVectorToQuery = VectorUtil.l2normalize(mipVectorToQuery); + mipCentroid = VectorUtil.l2normalize(mipCentroid); + + 
BinaryQuantizer quantizer = new BinaryQuantizer(dimensions, VectorSimilarityFunction.COSINE); + float[] vector = mipVectorToQuery; + byte[] destination = new byte[dimensions / 8 * BQSpaceUtils.B_QUERY]; + float[] centroid = mipCentroid; + float cDotC = VectorUtil.dotProduct(centroid, centroid); + BinaryQuantizer.QueryFactors corrections = quantizer.quantizeForQuery(vector, destination, centroid); + + int sumQ = corrections.quantizedSum(); + float lower = corrections.lower(); + float width = corrections.width(); + float normVmC = corrections.normVmC(); + float vDotC = corrections.vDotC(); + + assertEquals(5277, sumQ); + assertEquals(-0.086002514f, lower, 0.00000001f); + assertEquals(0.011431345f, width, 0.00000001f); + assertEquals(1.3955297f, normVmC, 0.00001f); + assertEquals(0.026248248f, vDotC, 0.0001f); + assertEquals(1.0f, cDotC, 0.0001f); + assertArrayEquals( + new byte[] { + -83, + 18, + 67, + 37, + 80, + 8, + 40, + -1, + -19, + 115, + -87, + -63, + -59, + 12, + -2, + -63, + -19, + 43, + -104, + 16, + -69, + 80, + -22, + 75, + -81, + -50, + 100, + -41, + 7, + -88, + -93, + -4, + 4, + 117, + 34, + -57, + -109, + 89, + -63, + -35, + -116, + 4, + 35, + 93, + -26, + -88, + -56, + -82, + 63, + -46, + -122, + -96, + -26, + 124, + -64, + 21, + 96, + 46, + 114, + 101, + 92, + -98, + -83, + 121, + 48, + -14, + -89, + -118, + 65, + -47, + -79, + -35, + 113, + 110, + 111, + 70, + 17, + -69, + -47, + 64, + 1, + 102, + 19, + 113, + -87, + -5, + -46, + -34, + -2, + 93, + -123, + 102, + 4, + -12, + 127, + 95, + 32, + -64, + -97, + -105, + 59, + 111, + 42, + -57, + -87, + -41, + -73, + -106, + 27, + -31, + 32, + -65, + 9, + -88, + 93, + -11, + -103, + 37, + 27, + -127, + 108, + 127, + -119, + 58, + 38, + 18, + -103, + -27, + -63, + 48, + 77, + -13, + 3, + -40, + -127, + 37, + 82, + -87, + -26, + -45, + -14, + 18, + -49, + 76, + 25, + 37, + -12, + 106, + 17, + 115, + 0, + 23, + -109, + 26, + -126, + 21, + -35, + 111, + 4, + 60, + 58, + -64, + -104, + -125, + 23, + -58, + 
121, + -117, + 104, + -69, + 3, + -89, + -26, + 46, + 15, + 90, + -83, + -73, + -72, + -69, + 20, + -38, + -47, + 109, + -66, + -66, + -89, + 108, + -122, + -3, + 59, + -85, + 18, + 58, + 85, + -101, + -114, + 95, + 2, + -84, + -77, + 121, + -6, + 10, + 110, + -13, + -123, + -34, + 106, + -71, + -107, + 123, + 67, + -111, + 58, + 52, + -53, + 87, + -113, + -21, + -44, + 26, + 10, + -62, + 56, + 103, + 36, + -126, + 26, + 94, + -88, + -13, + -113, + -50, + -9, + -115, + 84, + 8, + -32, + -102, + -4, + 89, + 29, + 75, + -73, + -19, + 22, + -90, + 76, + -61, + 4, + -44, + -100, + -11, + 107, + 20, + -39, + -98, + 123, + 77, + 104, + 9, + 41, + 91, + -105, + -38, + -106, + -87, + 38, + 48, + 60, + 29, + -68, + 126, + -78, + -63, + -101, + -115, + 67, + -17, + 101, + -53, + 121, + 44, + -78, + -12, + -18, + 91, + -83, + -91, + -72, + 96, + 32, + -96, + 89, + 48, + 76, + -124, + 3, + 113, + -111, + 12, + -86, + 32, + -43, + 68, + 106, + -122, + -84, + -37, + -124, + 118, + 84, + -72, + 34, + 20, + 57, + -29, + 119, + 56, + -10, + -100, + -109, + 60, + -56, + 37, + 84, + 8, + -16, + 80, + 24, + 50, + 41, + -25, + 47, + 115, + -122, + -92, + -126, + -44, + 8, + 18, + -23, + 24, + -15, + 60, + 58, + 107, + 99, + -120, + -111, + -21, + 101, + 59, + -32, + -74, + -105, + 113, + -122, + 36, + 100, + -95, + -80, + 82, + -64, + 91, + -87, + -95, + 115, + 4, + 76, + 110, + 101, + 39, + 44, + 0, + 2, + 112, + -64, + -47, + 105, + 2, + 1, + -128 }, + destination + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java new file mode 100644 index 000000000000..4ac66a9f63a3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -0,0 +1,1746 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.common.logging.LogConfigurator; + +import java.io.IOException; + +public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { + + static { + LogConfigurator.loadLog4jPlugins(); + LogConfigurator.configureESLogging(); // native access requires logging to be initialized + } + + public void testScore() throws IOException { + int dimensions = random().nextInt(1, 4097); + int discretizedDimensions = BQVectorUtils.discretize(dimensions, 64); + + int randIdx = random().nextInt(VectorSimilarityFunction.values().length); + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.values()[randIdx]; + + float[] centroid = new float[dimensions]; + for (int j = 0; j < dimensions; j++) { + centroid[j] = random().nextFloat(-50f, 50f); + } + if (similarityFunction == VectorSimilarityFunction.COSINE) { + VectorUtil.l2normalize(centroid); + } + + byte[] vector = new byte[discretizedDimensions / 8 * BQSpaceUtils.B_QUERY]; + random().nextBytes(vector); + float 
distanceToCentroid = random().nextFloat(0f, 10_000.0f); + float vl = random().nextFloat(-1000f, 1000f); + float width = random().nextFloat(0f, 1000f); + short quantizedSum = (short) random().nextInt(0, 4097); + float normVmC = random().nextFloat(-1000f, 1000f); + float vDotC = random().nextFloat(-1000f, 1000f); + ES816BinaryFlatVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatVectorsScorer.BinaryQueryVector( + vector, + new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) + ); + + RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + @Override + public float getCentroidDistance(int vectorOrd) throws IOException { + return random().nextFloat(0f, 1000f); + } + + @Override + public float getVectorMagnitude(int vectorOrd) throws IOException { + return random().nextFloat(0f, 100f); + } + + @Override + public float getOOQ(int targetOrd) throws IOException { + return random().nextFloat(-1000f, 1000f); + } + + @Override + public float getNormOC(int targetOrd) throws IOException { + return random().nextFloat(-1000f, 1000f); + } + + @Override + public float getODotC(int targetOrd) throws IOException { + return random().nextFloat(-1000f, 1000f); + } + + @Override + public BinaryQuantizer getQuantizer() { + int dimensions = 128; + return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.EUCLIDEAN); + } + + @Override + public float[] getCentroid() throws IOException { + return centroid; + } + + @Override + public RandomAccessBinarizedByteVectorValues copy() throws IOException { + return null; + } + + @Override + public byte[] vectorValue(int targetOrd) throws IOException { + byte[] vectorBytes = new byte[discretizedDimensions / 8]; + random().nextBytes(vectorBytes); + return vectorBytes; + } + + @Override + public int size() { + return 1; + } + + @Override + public int dimension() { + return dimensions; + } + }; + + 
ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer( + queryVector, + targetVectors, + similarityFunction + ); + + float score = scorer.score(0); + + assertTrue(score >= 0f); + } + + public void testScoreEuclidean() throws IOException { + int dimensions = 128; + + byte[] vector = new byte[] { + -8, + 10, + -27, + 112, + -83, + 36, + -36, + -122, + -114, + 82, + 55, + 33, + -33, + 120, + 55, + -99, + -93, + -86, + -55, + 21, + -121, + 30, + 111, + 30, + 0, + 82, + 21, + 38, + -120, + -127, + 40, + -32, + 78, + -37, + 42, + -43, + 122, + 115, + 30, + 115, + 123, + 108, + -13, + -65, + 123, + 124, + -33, + -68, + 49, + 5, + 20, + 58, + 0, + 12, + 30, + 30, + 4, + 97, + 10, + 66, + 4, + 35, + 1, + 67 }; + float distanceToCentroid = 157799.12f; + float vl = -57.883f; + float width = 9.972266f; + short quantizedSum = 795; + ES816BinaryFlatVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatVectorsScorer.BinaryQueryVector( + vector, + new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f) + ); + + RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + @Override + public float getCentroidDistance(int vectorOrd) { + return 355.78073f; + } + + @Override + public float getVectorMagnitude(int vectorOrd) { + return 0.7636705f; + } + + @Override + public float getOOQ(int targetOrd) { + return 0; + } + + @Override + public float getNormOC(int targetOrd) { + return 0; + } + + @Override + public float getODotC(int targetOrd) { + return 0; + } + + @Override + public BinaryQuantizer getQuantizer() { + int dimensions = 128; + return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.EUCLIDEAN); + } + + @Override + public float[] getCentroid() { + return new float[] { + 26.7f, + 16.2f, + 10.913f, + 10.314f, + 12.12f, + 14.045f, + 15.887f, + 16.864f, + 32.232f, + 31.567f, + 34.922f, + 21.624f, + 16.349f, + 
29.625f, + 31.994f, + 22.044f, + 37.847f, + 24.622f, + 36.299f, + 27.966f, + 14.368f, + 19.248f, + 30.778f, + 35.927f, + 27.019f, + 16.381f, + 17.325f, + 16.517f, + 13.272f, + 9.154f, + 9.242f, + 17.995f, + 53.777f, + 23.011f, + 12.929f, + 16.128f, + 22.16f, + 28.643f, + 25.861f, + 27.197f, + 59.883f, + 40.878f, + 34.153f, + 22.795f, + 24.402f, + 37.427f, + 34.19f, + 29.288f, + 61.812f, + 26.355f, + 39.071f, + 37.789f, + 23.33f, + 22.299f, + 28.64f, + 47.828f, + 52.457f, + 21.442f, + 24.039f, + 29.781f, + 27.707f, + 19.484f, + 14.642f, + 28.757f, + 54.567f, + 20.936f, + 25.112f, + 25.521f, + 22.077f, + 18.272f, + 14.526f, + 29.054f, + 61.803f, + 24.509f, + 37.517f, + 35.906f, + 24.106f, + 22.64f, + 32.1f, + 48.788f, + 60.102f, + 39.625f, + 34.766f, + 22.497f, + 24.397f, + 41.599f, + 38.419f, + 30.99f, + 55.647f, + 25.115f, + 14.96f, + 18.882f, + 26.918f, + 32.442f, + 26.231f, + 27.107f, + 26.828f, + 15.968f, + 18.668f, + 14.071f, + 10.906f, + 8.989f, + 9.721f, + 17.294f, + 36.32f, + 21.854f, + 35.509f, + 27.106f, + 14.067f, + 19.82f, + 33.582f, + 35.997f, + 33.528f, + 30.369f, + 36.955f, + 21.23f, + 15.2f, + 30.252f, + 34.56f, + 22.295f, + 29.413f, + 16.576f, + 11.226f, + 10.754f, + 12.936f, + 15.525f, + 15.868f, + 16.43f }; + } + + @Override + public RandomAccessBinarizedByteVectorValues copy() { + return null; + } + + @Override + public byte[] vectorValue(int targetOrd) { + return new byte[] { 44, 108, 120, -15, -61, -32, 124, 25, -63, -57, 6, 24, 1, -61, 1, 14 }; + } + + @Override + public int size() { + return 1; + } + + @Override + public int dimension() { + return dimensions; + } + }; + + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.EUCLIDEAN; + + ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer( + queryVector, + targetVectors, + similarityFunction + ); + + assertEquals(1f / (1f + 245482.47f), scorer.score(0), 0.1f); + } + + public void testScoreMIP() 
throws IOException { + int dimensions = 768; + + byte[] vector = new byte[] { + -76, + 44, + 81, + 31, + 30, + -59, + 56, + -118, + -36, + 45, + -11, + 8, + -61, + 95, + -100, + 18, + -91, + -98, + -46, + 31, + -8, + 82, + -42, + 121, + 75, + -61, + 125, + -21, + -82, + 16, + 21, + 40, + -1, + 12, + -92, + -22, + -49, + -92, + -19, + -32, + -56, + -34, + 60, + -100, + 69, + 13, + 60, + -51, + 90, + 4, + -77, + 63, + 124, + 69, + 88, + 73, + -72, + 29, + -96, + 44, + 69, + -123, + -59, + -94, + 84, + 80, + -61, + 27, + -37, + -92, + -51, + -86, + 19, + -55, + -36, + -2, + 68, + -37, + -128, + 59, + -47, + 119, + -53, + 56, + -12, + 37, + 27, + 119, + -37, + 125, + 78, + 19, + 15, + -9, + 94, + 100, + -72, + 55, + 86, + -48, + 26, + 10, + -112, + 28, + -15, + -64, + -34, + 55, + -42, + -31, + -96, + -18, + 60, + -44, + 69, + 106, + -20, + 15, + 47, + 49, + -122, + -45, + 119, + 101, + 22, + 77, + 108, + -15, + -71, + -28, + -43, + -68, + -127, + -86, + -118, + -51, + 121, + -65, + -10, + -49, + 115, + -6, + -61, + -98, + 21, + 41, + 56, + 29, + -16, + -82, + 4, + 72, + -77, + 23, + 23, + -32, + -98, + 112, + 27, + -4, + 91, + -69, + 102, + -114, + 16, + -20, + -76, + -124, + 43, + 12, + 3, + -30, + 42, + -44, + -88, + -72, + -76, + -94, + -73, + 46, + -17, + 4, + -74, + -44, + 53, + -11, + -117, + -105, + -113, + -37, + -43, + -128, + -70, + 56, + -68, + -100, + 56, + -20, + 77, + 12, + 17, + -119, + -17, + 59, + -10, + -26, + 29, + 42, + -59, + -28, + -28, + 60, + -34, + 60, + -24, + 80, + -81, + 24, + 122, + 127, + 62, + 124, + -5, + -11, + 59, + -52, + 74, + -29, + -116, + 3, + -40, + -99, + -24, + 11, + -10, + 95, + 21, + -38, + 59, + -52, + 29, + 58, + 112, + 100, + -106, + -90, + 71, + 72, + 57, + 95, + 98, + 96, + -41, + -16, + 50, + -18, + 123, + -36, + 74, + -101, + 17, + 50, + 48, + 96, + 57, + 7, + 81, + -16, + -32, + -102, + -24, + -71, + -10, + 37, + -22, + 94, + -36, + -52, + -71, + -47, + 47, + -1, + -31, + -10, + -126, + -15, + -123, + -59, + 71, + 
-49, + 67, + 99, + -57, + 21, + -93, + -13, + -18, + 54, + -112, + -60, + 9, + 25, + -30, + -47, + 26, + 27, + 26, + -63, + 1, + -63, + 18, + -114, + 80, + 110, + -123, + 0, + -63, + -126, + -128, + 10, + -60, + 51, + -71, + 28, + 114, + -4, + 53, + 10, + 23, + -96, + 9, + 32, + -22, + 5, + -108, + 33, + 98, + -59, + -106, + -126, + 73, + 72, + -72, + -73, + -60, + -96, + -99, + 31, + 40, + 15, + -19, + 17, + -128, + 33, + -75, + 96, + -18, + -47, + 75, + 27, + -60, + -16, + -82, + 13, + 21, + 37, + 23, + 70, + 9, + -39, + 16, + -127, + 35, + -78, + 64, + 99, + -46, + 1, + 28, + 65, + 125, + 14, + 42, + 26 }; + float distanceToCentroid = 95.39032f; + float vl = -0.10079563f; + float width = 0.014609014f; + short quantizedSum = 5306; + float normVmC = 9.766797f; + float vDotC = 133.56123f; + float cDotC = 132.20227f; + ES816BinaryFlatVectorsScorer.BinaryQueryVector queryVector = new ES816BinaryFlatVectorsScorer.BinaryQueryVector( + vector, + new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) + ); + + RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + @Override + public float getCentroidDistance(int vectorOrd) { + return 0f; + } + + @Override + public float getCentroidDP() { + return cDotC; + } + + @Override + public float getVectorMagnitude(int vectorOrd) { + return 0f; + } + + @Override + public float getOOQ(int targetOrd) { + return 0.7882396f; + } + + @Override + public float getNormOC(int targetOrd) { + return 5.0889387f; + } + + @Override + public float getODotC(int targetOrd) { + return 131.485660f; + } + + @Override + public BinaryQuantizer getQuantizer() { + int dimensions = 768; + return new BinaryQuantizer(dimensions, dimensions, VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT); + } + + @Override + public float[] getCentroid() { + return new float[] { + 0.16672021f, + 0.11700719f, + 0.013227397f, + 0.09305186f, + -0.029422699f, + 0.17622353f, + 0.4267106f, + 
-0.297038f, + 0.13915674f, + 0.38441318f, + -0.486725f, + -0.15987667f, + -0.19712289f, + 0.1349074f, + -0.19016947f, + -0.026179956f, + 0.4129807f, + 0.14325741f, + -0.09106042f, + 0.06876218f, + -0.19389102f, + 0.4467732f, + 0.03169017f, + -0.066950575f, + -0.044301506f, + -0.0059755715f, + -0.33196586f, + 0.18213534f, + -0.25065416f, + 0.30251458f, + 0.3448419f, + -0.14900115f, + -0.07782894f, + 0.3568707f, + -0.46595258f, + 0.37295088f, + -0.088741764f, + 0.17248306f, + -0.0072736046f, + 0.32928637f, + 0.13216197f, + 0.032092985f, + 0.21553043f, + 0.016091486f, + 0.31958902f, + 0.0133126f, + 0.1579258f, + 0.018537233f, + 0.046248164f, + -0.0048194043f, + -0.2184672f, + -0.26273906f, + -0.110678785f, + -0.04542999f, + -0.41625032f, + 0.46025568f, + -0.16116948f, + 0.4091706f, + 0.18427321f, + 0.004736977f, + 0.16289745f, + -0.05330932f, + -0.2694863f, + -0.14762327f, + 0.17744702f, + 0.2445075f, + 0.14377175f, + 0.37390858f, + 0.16165806f, + 0.17177118f, + 0.097307935f, + 0.36326465f, + 0.23221572f, + 0.15579978f, + -0.065486655f, + -0.29006517f, + -0.009194494f, + 0.009019374f, + 0.32154799f, + -0.23186184f, + 0.46485493f, + -0.110756285f, + -0.18604982f, + 0.35027295f, + 0.19815539f, + 0.47386464f, + -0.031379268f, + 0.124035835f, + 0.11556784f, + 0.4304302f, + -0.24455063f, + 0.1816723f, + 0.034300473f, + -0.034347706f, + 0.040140998f, + 0.1389901f, + 0.22840638f, + -0.19911191f, + 0.07563166f, + -0.2744902f, + 0.13114859f, + -0.23862572f, + -0.31404558f, + 0.41355187f, + 0.12970817f, + -0.35403475f, + -0.2714075f, + 0.07231573f, + 0.043893218f, + 0.30324167f, + 0.38928393f, + -0.1567055f, + -0.0083288215f, + 0.0487653f, + 0.12073729f, + -0.01582117f, + 0.13381198f, + -0.084824145f, + -0.15329859f, + -1.120622f, + 0.3972598f, + 0.36022213f, + -0.29826534f, + -0.09468781f, + 0.03550699f, + -0.21630692f, + 0.55655843f, + -0.14842057f, + 0.5924833f, + 0.38791573f, + 0.1502777f, + 0.111737385f, + 0.1926823f, + 0.66021144f, + 0.25601995f, + 0.28220543f, + 
0.10194068f, + 0.013066262f, + -0.09348819f, + -0.24085014f, + -0.17843121f, + -0.012598432f, + 0.18757571f, + 0.48543528f, + -0.059388146f, + 0.1548026f, + 0.041945867f, + 0.3322589f, + 0.012830887f, + 0.16621992f, + 0.22606649f, + 0.13959105f, + -0.16688728f, + 0.47194278f, + -0.12767595f, + 0.037815034f, + 0.441938f, + 0.07875027f, + 0.08625042f, + 0.053454693f, + 0.74093896f, + 0.34662113f, + 0.009829135f, + -0.033400282f, + 0.030965377f, + 0.17645596f, + 0.083803624f, + 0.32578796f, + 0.49538168f, + -0.13212465f, + -0.39596975f, + 0.109529115f, + 0.2815771f, + -0.051440604f, + 0.21889819f, + 0.25598505f, + 0.012208843f, + -0.012405662f, + 0.3248759f, + 0.00997502f, + 0.05999008f, + 0.03562817f, + 0.19007418f, + 0.24805716f, + 0.5926766f, + 0.26937613f, + 0.25856f, + -0.05798439f, + -0.29168302f, + 0.14050555f, + 0.084851265f, + -0.03763504f, + 0.8265359f, + -0.23383066f, + -0.042164285f, + 0.19120507f, + -0.12189065f, + 0.3864055f, + -0.19823311f, + 0.30280992f, + 0.10814344f, + -0.164514f, + -0.22905481f, + 0.13680641f, + 0.4513772f, + -0.514546f, + -0.061746247f, + 0.11598224f, + -0.23093395f, + -0.09735358f, + 0.02767051f, + 0.11594536f, + 0.17106244f, + 0.21301728f, + -0.048222974f, + 0.2212131f, + -0.018857865f, + -0.09783516f, + 0.42156664f, + -0.14032331f, + -0.103861615f, + 0.4190284f, + 0.068923555f, + -0.015083771f, + 0.083590426f, + -0.15759592f, + -0.19096768f, + -0.4275228f, + 0.12626286f, + 0.12192557f, + 0.4157616f, + 0.048780657f, + 0.008426048f, + -0.0869124f, + 0.054927208f, + 0.28417027f, + 0.29765493f, + 0.09203619f, + -0.14446871f, + -0.117514975f, + 0.30662632f, + 0.24904715f, + -0.19551662f, + -0.0045785015f, + 0.4217626f, + -0.31457824f, + 0.23381722f, + 0.089111514f, + -0.27170828f, + -0.06662652f, + 0.10011391f, + -0.090274535f, + 0.101849966f, + 0.26554734f, + -0.1722843f, + 0.23296228f, + 0.25112453f, + -0.16790418f, + 0.010348314f, + 0.05061285f, + 0.38003662f, + 0.0804625f, + 0.3450673f, + 0.364368f, + -0.2529952f, + 
-0.034065288f, + 0.22796603f, + 0.5457553f, + 0.11120353f, + 0.24596325f, + 0.42822433f, + -0.19215727f, + -0.06974534f, + 0.19388479f, + -0.17598474f, + -0.08769705f, + 0.12769659f, + 0.1371616f, + -0.4636819f, + 0.16870509f, + 0.14217548f, + 0.04412187f, + -0.20930687f, + 0.0075530168f, + 0.10065227f, + 0.45334083f, + -0.1097471f, + -0.11139921f, + -0.31835595f, + -0.057386875f, + 0.16285825f, + 0.5088513f, + -0.06318843f, + -0.34759882f, + 0.21132466f, + 0.33609292f, + 0.04858872f, + -0.058759f, + 0.22845529f, + -0.07641319f, + 0.5452827f, + -0.5050389f, + 0.1788054f, + 0.37428045f, + 0.066334985f, + -0.28162515f, + -0.15629752f, + 0.33783385f, + -0.0832242f, + 0.29144394f, + 0.47892854f, + -0.47006592f, + -0.07867588f, + 0.3872869f, + 0.28053126f, + 0.52399015f, + 0.21979983f, + 0.076880336f, + 0.47866163f, + 0.252952f, + -0.1323851f, + -0.22225754f, + -0.38585815f, + 0.12967427f, + 0.20340872f, + -0.326928f, + 0.09636557f, + -0.35929212f, + 0.5413311f, + 0.019960884f, + 0.33512768f, + 0.15133342f, + -0.14124066f, + -0.1868793f, + -0.07862198f, + 0.22739467f, + 0.19598985f, + 0.34314656f, + -0.05071516f, + -0.21107961f, + 0.19934991f, + 0.04822684f, + 0.15060754f, + 0.26586458f, + -0.15528078f, + 0.123646654f, + 0.14450715f, + -0.12574252f, + 0.30608323f, + 0.018549249f, + 0.36323825f, + 0.06762097f, + 0.08562406f, + -0.07863075f, + 0.15975896f, + 0.008347004f, + 0.37931192f, + 0.22957338f, + 0.33606857f, + -0.25204057f, + 0.18126069f, + 0.41903302f, + 0.20244692f, + -0.053850617f, + 0.23088565f, + 0.16085246f, + 0.1077502f, + -0.12445943f, + 0.115779735f, + 0.124704875f, + 0.13076028f, + -0.11628619f, + -0.12580182f, + 0.065204754f, + -0.26290357f, + -0.23539798f, + -0.1855292f, + 0.39872098f, + 0.44495568f, + 0.05491784f, + 0.05135692f, + 0.624011f, + 0.22839564f, + 0.0022447354f, + -0.27169296f, + -0.1694988f, + -0.19106841f, + 0.0110123325f, + 0.15464798f, + -0.16269256f, + 0.04033836f, + -0.11792753f, + 0.17172396f, + -0.08912173f, + -0.30929542f, + 
-0.03446989f, + -0.21738084f, + 0.39657044f, + 0.33550346f, + -0.06839139f, + 0.053675443f, + 0.33783767f, + 0.22576828f, + 0.38280004f, + 4.1448855f, + 0.14225426f, + 0.24038498f, + 0.072373435f, + -0.09465926f, + -0.016144043f, + 0.40864578f, + -0.2583055f, + 0.031816103f, + 0.062555805f, + 0.06068663f, + 0.25858644f, + -0.10598804f, + 0.18201788f, + -0.00090025424f, + 0.085680895f, + 0.4304161f, + 0.028686283f, + 0.027298616f, + 0.27473378f, + -0.3888415f, + 0.44825438f, + 0.3600378f, + 0.038944595f, + 0.49292335f, + 0.18556066f, + 0.15779617f, + 0.29989767f, + 0.39233804f, + 0.39759228f, + 0.3850708f, + -0.0526475f, + 0.18572918f, + 0.09667526f, + -0.36111078f, + 0.3439669f, + 0.1724522f, + 0.14074509f, + 0.26097745f, + 0.16626832f, + -0.3062964f, + -0.054877423f, + 0.21702516f, + 0.4736452f, + 0.2298038f, + -0.2983771f, + 0.118479654f, + 0.35940516f, + 0.12212727f, + 0.17234904f, + 0.30632678f, + 0.09207966f, + -0.14084268f, + -0.19737118f, + 0.12442629f, + 0.52454203f, + 0.1266684f, + 0.3062802f, + 0.121598125f, + -0.09156268f, + 0.11491686f, + -0.105715364f, + 0.19831072f, + 0.061421417f, + -0.41778997f, + 0.14488487f, + 0.023310646f, + 0.27257463f, + 0.16821945f, + -0.16702746f, + 0.263203f, + 0.33512688f, + 0.35117313f, + -0.31740817f, + -0.14203706f, + 0.061256267f, + -0.19764185f, + 0.04822579f, + -0.0016218472f, + -0.025792575f, + 0.4885193f, + -0.16942391f, + -0.04156327f, + 0.15908112f, + -0.06998626f, + 0.53907114f, + 0.10317832f, + -0.365468f, + 0.4729886f, + 0.14291425f, + 0.32812154f, + -0.0273262f, + 0.31760117f, + 0.16925456f, + 0.21820979f, + 0.085142255f, + 0.16118735f, + -3.7089362f, + 0.251577f, + 0.18394576f, + 0.027926167f, + 0.15720351f, + 0.13084261f, + 0.16240814f, + 0.23045056f, + -0.3966458f, + 0.22822891f, + -0.061541352f, + 0.028320132f, + -0.14736478f, + 0.184569f, + 0.084853746f, + 0.15172474f, + 0.08277542f, + 0.27751622f, + 0.23450488f, + -0.15349835f, + 0.29665688f, + 0.32045734f, + 0.20012043f, + -0.2749372f, + 0.011832386f, + 
0.05976605f, + 0.018300122f, + -0.07855043f, + -0.075900674f, + 0.0384252f, + -0.15101928f, + 0.10922137f, + 0.47396383f, + -0.1771141f, + 0.2203417f, + 0.33174303f, + 0.36640546f, + 0.10906258f, + 0.13765177f, + 0.2488032f, + -0.061588854f, + 0.20347528f, + 0.2574979f, + 0.22369152f, + 0.18777567f, + -0.0772263f, + -0.1353299f, + 0.087077625f, + -0.05409276f, + 0.027534787f, + 0.08053508f, + 0.3403908f, + -0.15362988f, + 0.07499862f, + 0.54367846f, + -0.045938436f, + 0.12206868f, + 0.031069376f, + 0.2972343f, + 0.3235321f, + -0.053970363f, + -0.0042564687f, + 0.21447177f, + 0.023565233f, + -0.1286087f, + -0.047359955f, + 0.23021339f, + 0.059837278f, + 0.19709614f, + -0.17340347f, + 0.11572943f, + 0.21720429f, + 0.29375625f, + -0.045433592f, + 0.033339307f, + 0.24594454f, + -0.021661613f, + -0.12823369f, + 0.41809165f, + 0.093840264f, + -0.007481906f, + 0.22441079f, + -0.45719734f, + 0.2292629f, + 2.675806f, + 0.3690025f, + 2.1311781f, + 0.07818368f, + -0.17055893f, + 0.3162922f, + -0.2983149f, + 0.21211359f, + 0.037087034f, + 0.021580033f, + 0.086415835f, + 0.13541797f, + -0.12453424f, + 0.04563163f, + -0.082379065f, + -0.15938349f, + 0.38595748f, + -0.8796574f, + -0.080991246f, + 0.078572094f, + 0.20274459f, + 0.009252143f, + -0.12719384f, + 0.105845824f, + 0.1592398f, + -0.08656061f, + -0.053054806f, + 0.090986334f, + -0.02223379f, + -0.18215932f, + -0.018316114f, + 0.1806707f, + 0.24788831f, + -0.041049056f, + 0.01839475f, + 0.19160001f, + -0.04827654f, + 4.4070687f, + 0.12640671f, + -0.11171499f, + -0.015480781f, + 0.14313947f, + 0.10024215f, + 0.4129662f, + 0.038836367f, + -0.030228542f, + 0.2948598f, + 0.32946473f, + 0.2237934f, + 0.14260699f, + -0.044821896f, + 0.23791742f, + 0.079720296f, + 0.27059034f, + 0.32129505f, + 0.2725177f, + 0.06883333f, + 0.1478041f, + 0.07598411f, + 0.27230525f, + -0.04704308f, + 0.045167264f, + 0.215413f, + 0.20359069f, + -0.092178136f, + -0.09523752f, + 0.21427691f, + 0.10512272f, + 5.1295033f, + 0.040909242f, + 0.007160441f, 
+ -0.192866f, + -0.102640584f, + 0.21103396f, + -0.006780398f, + -0.049653083f, + -0.29426834f, + -0.0038102255f, + -0.13842082f, + 0.06620181f, + -0.3196518f, + 0.33279592f, + 0.13845938f, + 0.16162738f, + -0.24798508f, + -0.06672485f, + 0.195944f, + -0.11957207f, + 0.44237947f, + -0.07617347f, + 0.13575341f, + -0.35074243f, + -0.093798876f, + 0.072853446f, + -0.20490398f, + 0.26504788f, + -0.046076056f, + 0.16488416f, + 0.36007464f, + 0.20955376f, + -0.3082038f, + 0.46533757f, + -0.27326992f, + -0.14167665f, + 0.25017953f, + 0.062622115f, + 0.14057694f, + -0.102370486f, + 0.33898357f, + 0.36456722f, + -0.10120469f, + -0.27838466f, + -0.11779602f, + 0.18517569f, + -0.05942488f, + 0.076405466f, + 0.007960496f, + 0.0443746f, + 0.098998964f, + -0.01897129f, + 0.8059487f, + 0.06991939f, + 0.26562217f, + 0.26942885f, + 0.11432197f, + -0.0055776504f, + 0.054493718f, + -0.13086213f, + 0.6841702f, + 0.121975765f, + 0.02787146f, + 0.29039973f, + 0.30943078f, + 0.21762547f, + 0.28751117f, + 0.027524523f, + 0.5315654f, + -0.22451901f, + -0.13782433f, + 0.08228316f, + 0.07808882f, + 0.17445615f, + -0.042489477f, + 0.13232234f, + 0.2756272f, + -0.18824948f, + 0.14326479f, + -0.119312495f, + 0.011788091f, + -0.22103515f, + -0.2477118f, + -0.10513839f, + 0.034028634f, + 0.10693818f, + 0.03057979f, + 0.04634646f, + 0.2289361f, + 0.09981585f, + 0.26901972f, + 0.1561221f, + -0.10639886f, + 0.36466748f, + 0.06350991f, + 0.027927283f, + 0.11919768f, + 0.23290513f, + -0.03417105f, + 0.16698854f, + -0.19243467f, + 0.28430334f, + 0.03754995f, + -0.08697018f, + 0.20413163f, + -0.27218238f, + 0.13707504f, + -0.082289375f, + 0.03479585f, + 0.2298305f, + 0.4983682f, + 0.34522808f, + -0.05711886f, + -0.10568684f, + -0.07771385f }; + } + + @Override + public RandomAccessBinarizedByteVectorValues copy() { + return null; + } + + @Override + public byte[] vectorValue(int targetOrd) { + return new byte[] { + -88, + -3, + 60, + -75, + -38, + 79, + 84, + -53, + -116, + -126, + 19, + -19, + -21, + 
-80, + 69, + 101, + -71, + 53, + 101, + -124, + -24, + -76, + 92, + -45, + 108, + -107, + -18, + 102, + 23, + -80, + -47, + 116, + 87, + -50, + 27, + -31, + -10, + -13, + 117, + -88, + -27, + -93, + -98, + -39, + 30, + -109, + -114, + 5, + -15, + 98, + -82, + 81, + 83, + 118, + 30, + -118, + -12, + -95, + 121, + 125, + -13, + -88, + 75, + -85, + -56, + -126, + 82, + -59, + 48, + -81, + 67, + -63, + 81, + 24, + -83, + 95, + -44, + 103, + 3, + -40, + -13, + -41, + -29, + -60, + 1, + 65, + -4, + -110, + -40, + 34, + 118, + 51, + -76, + 75, + 70, + -51 }; + } + + @Override + public int size() { + return 1; + } + + @Override + public int dimension() { + return dimensions; + } + }; + + VectorSimilarityFunction similarityFunction = VectorSimilarityFunction.MAXIMUM_INNER_PRODUCT; + + ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer scorer = new ES816BinaryFlatVectorsScorer.BinarizedRandomVectorScorer( + queryVector, + targetVectors, + similarityFunction + ); + + assertEquals(132.30249f, scorer.score(0), 0.0001f); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java new file mode 100644 index 000000000000..0892436891ff --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java @@ -0,0 +1,175 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.KnnFloatVectorField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.KnnFloatVectorQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; +import org.elasticsearch.common.logging.LogConfigurator; + +import java.io.IOException; +import java.util.Locale; + +import static java.lang.String.format; +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; + +public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormatTestCase { + + static { + LogConfigurator.loadLog4jPlugins(); + 
LogConfigurator.configureESLogging(); // native access requires logging to be initialized + } + + @Override + protected Codec getCodec() { + return new Lucene912Codec() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return new ES816BinaryQuantizedVectorsFormat(); + } + }; + } + + public void testSearch() throws Exception { + String fieldName = "field"; + int numVectors = random().nextInt(99, 500); + int dims = random().nextInt(4, 65); + float[] vector = randomVector(dims); + VectorSimilarityFunction similarityFunction = randomSimilarity(); + KnnFloatVectorField knnField = new KnnFloatVectorField(fieldName, vector, similarityFunction); + IndexWriterConfig iwc = newIndexWriterConfig(); + try (Directory dir = newDirectory()) { + try (IndexWriter w = new IndexWriter(dir, iwc)) { + for (int i = 0; i < numVectors; i++) { + Document doc = new Document(); + knnField.setVectorValue(randomVector(dims)); + doc.add(knnField); + w.addDocument(doc); + } + w.commit(); + + try (IndexReader reader = DirectoryReader.open(w)) { + IndexSearcher searcher = new IndexSearcher(reader); + final int k = random().nextInt(5, 50); + float[] queryVector = randomVector(dims); + Query q = new KnnFloatVectorQuery(fieldName, queryVector, k); + TopDocs collectedDocs = searcher.search(q, k); + assertEquals(k, collectedDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation); + } + } + } + } + + public void testToString() { + FilterCodec customCodec = new FilterCodec("foo", Codec.getDefault()) { + @Override + public KnnVectorsFormat knnVectorsFormat() { + return new ES816BinaryQuantizedVectorsFormat(); + } + }; + String expectedPattern = "ES816BinaryQuantizedVectorsFormat(" + + "name=ES816BinaryQuantizedVectorsFormat, " + + "flatVectorScorer=ES816BinaryFlatVectorsScorer(nonQuantizedDelegate=%s()))"; + var defaultScorer = format(Locale.ROOT, expectedPattern, "DefaultFlatVectorScorer"); + var memSegScorer = 
format(Locale.ROOT, expectedPattern, "Lucene99MemorySegmentFlatVectorsScorer"); + assertThat(customCodec.knnVectorsFormat().toString(), is(oneOf(defaultScorer, memSegScorer))); + } + + @Override + public void testRandomWithUpdatesAndGraph() { + // graph not supported + } + + @Override + public void testSearchWithVisitedLimit() { + // visited limit is not respected, as it is brute force search + } + + public void testQuantizedVectorsWriteAndRead() throws IOException { + String fieldName = "field"; + int numVectors = random().nextInt(99, 500); + int dims = random().nextInt(4, 65); + + float[] vector = randomVector(dims); + VectorSimilarityFunction similarityFunction = randomSimilarity(); + KnnFloatVectorField knnField = new KnnFloatVectorField(fieldName, vector, similarityFunction); + try (Directory dir = newDirectory()) { + try (IndexWriter w = new IndexWriter(dir, newIndexWriterConfig())) { + for (int i = 0; i < numVectors; i++) { + Document doc = new Document(); + knnField.setVectorValue(randomVector(dims)); + doc.add(knnField); + w.addDocument(doc); + if (i % 101 == 0) { + w.commit(); + } + } + w.commit(); + w.forceMerge(1); + + try (IndexReader reader = DirectoryReader.open(w)) { + LeafReader r = getOnlyLeafReader(reader); + FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); + assertEquals(vectorValues.size(), numVectors); + OffHeapBinarizedVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) + .getQuantizedVectorValues(); + float[] centroid = qvectorValues.getCentroid(); + assertEquals(centroid.length, dims); + + int descritizedDimension = BQVectorUtils.discretize(dims, 64); + BinaryQuantizer quantizer = new BinaryQuantizer(dims, descritizedDimension, similarityFunction); + byte[] expectedVector = new byte[BQVectorUtils.discretize(dims, 64) / 8]; + if (similarityFunction == VectorSimilarityFunction.COSINE) { + vectorValues = new 
ES816BinaryQuantizedVectorsWriter.NormalizedFloatVectorValues(vectorValues); + } + + while (vectorValues.nextDoc() != NO_MORE_DOCS) { + float[] corrections = quantizer.quantizeForIndex(vectorValues.vectorValue(), expectedVector, centroid); + assertArrayEquals(expectedVector, qvectorValues.vectorValue()); + assertEquals(corrections.length, qvectorValues.getCorrectiveTerms().length); + for (int i = 0; i < corrections.length; i++) { + assertEquals(corrections[i], qvectorValues.getCorrectiveTerms()[i], 0.00001f); + } + } + } + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java new file mode 100644 index 000000000000..f607de57e1fd --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -0,0 +1,126 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2024 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.FilterCodec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.KnnFloatVectorField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; +import org.apache.lucene.util.SameThreadExecutorService; +import org.elasticsearch.common.logging.LogConfigurator; + +import java.util.Arrays; +import java.util.Locale; + +import static java.lang.String.format; +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.oneOf; + +public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormatTestCase { + + static { + LogConfigurator.loadLog4jPlugins(); + LogConfigurator.configureESLogging(); // native access requires logging to be initialized + } + + @Override + protected Codec getCodec() { + return new Lucene912Codec() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return new ES816HnswBinaryQuantizedVectorsFormat(); + } + }; + } + + public void testToString() { + FilterCodec customCodec = new FilterCodec("foo", Codec.getDefault()) { + @Override + public KnnVectorsFormat knnVectorsFormat() { + return new ES816HnswBinaryQuantizedVectorsFormat(10, 20, 1, null); + } + }; + String expectedPattern = + 
"ES816HnswBinaryQuantizedVectorsFormat(name=ES816HnswBinaryQuantizedVectorsFormat, maxConn=10, beamWidth=20," + + " flatVectorFormat=ES816BinaryQuantizedVectorsFormat(name=ES816BinaryQuantizedVectorsFormat," + + " flatVectorScorer=ES816BinaryFlatVectorsScorer(nonQuantizedDelegate=%s())))"; + + var defaultScorer = format(Locale.ROOT, expectedPattern, "DefaultFlatVectorScorer"); + var memSegScorer = format(Locale.ROOT, expectedPattern, "Lucene99MemorySegmentFlatVectorsScorer"); + assertThat(customCodec.knnVectorsFormat().toString(), is(oneOf(defaultScorer, memSegScorer))); + } + + public void testSingleVectorCase() throws Exception { + float[] vector = randomVector(random().nextInt(12, 500)); + for (VectorSimilarityFunction similarityFunction : VectorSimilarityFunction.values()) { + try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, newIndexWriterConfig())) { + Document doc = new Document(); + doc.add(new KnnFloatVectorField("f", vector, similarityFunction)); + w.addDocument(doc); + w.commit(); + try (IndexReader reader = DirectoryReader.open(w)) { + LeafReader r = getOnlyLeafReader(reader); + FloatVectorValues vectorValues = r.getFloatVectorValues("f"); + assert (vectorValues.size() == 1); + while (vectorValues.nextDoc() != NO_MORE_DOCS) { + assertArrayEquals(vector, vectorValues.vectorValue(), 0.00001f); + } + TopDocs td = r.searchNearestVectors("f", randomVector(vector.length), 1, null, Integer.MAX_VALUE); + assertEquals(1, td.totalHits.value); + assertTrue(td.scoreDocs[0].score >= 0); + } + } + } + } + + public void testLimits() { + expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(-1, 20)); + expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(0, 20)); + expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(20, 0)); + expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(20, -1)); 
+ expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(512 + 1, 20)); + expectThrows(IllegalArgumentException.class, () -> new ES816HnswBinaryQuantizedVectorsFormat(20, 3201)); + expectThrows( + IllegalArgumentException.class, + () -> new ES816HnswBinaryQuantizedVectorsFormat(20, 100, 1, new SameThreadExecutorService()) + ); + } + + // Ensures that all expected vector similarity functions are translatable in the format. + public void testVectorSimilarityFuncs() { + // This does not necessarily have to be all similarity functions, but + // differences should be considered carefully. + var expectedValues = Arrays.stream(VectorSimilarityFunction.values()).toList(); + assertEquals(Lucene99HnswVectorsReader.SIMILARITY_FUNCTIONS, expectedValues); + } +} From 84fe9cf3a303c8c9477abe16c1783cd319e2c89f Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Tue, 8 Oct 2024 22:59:37 +0200 Subject: [PATCH 66/85] Track shard snapshot progress during node shutdown (#112567) Track shard snapshot progress during shutdown to identify any bottlenecks that cause slowness that can ultimately block shard re-allocation. 
Relates ES-9086 --- docs/changelog/112567.yaml | 5 + .../decider/DiskThresholdDeciderIT.java | 36 +- .../snapshots/SnapshotShutdownIT.java | 255 ++++++++++- .../cluster/node/DiscoveryNodes.java | 5 + .../common/settings/ClusterSettings.java | 2 + .../snapshots/IndexShardSnapshotStatus.java | 31 ++ .../snapshots/SnapshotShardsService.java | 96 ++++- .../SnapshotShutdownProgressTracker.java | 270 ++++++++++++ .../SnapshotShutdownProgressTrackerTests.java | 407 ++++++++++++++++++ .../elasticsearch/test/ESIntegTestCase.java | 32 ++ 10 files changed, 1087 insertions(+), 52 deletions(-) create mode 100644 docs/changelog/112567.yaml create mode 100644 server/src/main/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTracker.java create mode 100644 server/src/test/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTrackerTests.java diff --git a/docs/changelog/112567.yaml b/docs/changelog/112567.yaml new file mode 100644 index 000000000000..25e3ac8360c2 --- /dev/null +++ b/docs/changelog/112567.yaml @@ -0,0 +1,5 @@ +pr: 112567 +summary: Track shard snapshot progress during node shutdown +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index 2a275cf563d8..19b0f0bd7323 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -253,35 +253,17 @@ public class DiskThresholdDeciderIT extends DiskUsageIntegTestCase { } /** - * Index documents until all the shards are at least WATERMARK_BYTES in size, and return the one with the smallest size + * Index documents until all the shards are at least WATERMARK_BYTES in size. 
+ * @return the shard sizes. */ private ShardSizes createReasonableSizedShards(final String indexName) { - while (true) { - indexRandom(false, indexName, scaledRandomIntBetween(100, 10000)); - forceMerge(); - refresh(); - - final ShardStats[] shardStates = indicesAdmin().prepareStats(indexName) - .clear() - .setStore(true) - .setTranslog(true) - .get() - .getShards(); - - var smallestShardSize = Arrays.stream(shardStates) - .mapToLong(it -> it.getStats().getStore().sizeInBytes()) - .min() - .orElseThrow(() -> new AssertionError("no shards")); - - if (smallestShardSize > WATERMARK_BYTES) { - var shardSizes = Arrays.stream(shardStates) - .map(it -> new ShardSize(removeIndexUUID(it.getShardRouting().shardId()), it.getStats().getStore().sizeInBytes())) - .sorted(Comparator.comparing(ShardSize::size)) - .toList(); - logger.info("Created shards with sizes {}", shardSizes); - return new ShardSizes(shardSizes); - } - } + ShardStats[] shardStats = indexAllShardsToAnEqualOrGreaterMinimumSize(indexName, WATERMARK_BYTES); + var shardSizes = Arrays.stream(shardStats) + .map(it -> new ShardSize(removeIndexUUID(it.getShardRouting().shardId()), it.getStats().getStore().sizeInBytes())) + .sorted(Comparator.comparing(ShardSize::size)) + .toList(); + logger.info("Created shards with sizes {}", shardSizes); + return new ShardSizes(shardSizes); } private record ShardSizes(List sizes) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java index 3c71b50321c7..980ef2a87c9c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotShutdownIT.java @@ -9,6 +9,7 @@ package org.elasticsearch.snapshots; +import org.apache.logging.log4j.Level; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.ActionResponse; @@ -33,19 +34,26 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import java.util.Collection; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; +import static org.elasticsearch.snapshots.SnapshotShutdownProgressTracker.SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; @@ -55,15 +63,44 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { private static final String REQUIRE_NODE_NAME_SETTING = IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._name"; + private MockLog mockLog; + + public void setUp() throws Exception { + super.setUp(); + mockLog = MockLog.capture(SnapshotShutdownProgressTracker.class); + } + + private void resetMockLog() { + mockLog.close(); + mockLog = MockLog.capture(SnapshotShutdownProgressTracker.class); + } + + public void tearDown() throws Exception { + mockLog.close(); + super.tearDown(); + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), 
MockTransportService.TestPlugin.class); } + /** + * Tests that shard snapshots on a node with RESTART shutdown metadata will finish on the same node. + */ + @TestLogging( + value = "org.elasticsearch.snapshots.SnapshotShutdownProgressTracker:DEBUG", + reason = "Testing SnapshotShutdownProgressTracker's progress, which is reported at the DEBUG logging level" + ) public void testRestartNodeDuringSnapshot() throws Exception { // Marking a node for restart has no impact on snapshots (see #71333 for how to handle this case) internalCluster().ensureAtLeastNumDataNodes(1); - final var originalNode = internalCluster().startDataOnlyNode(); + final var originalNode = internalCluster().startDataOnlyNode( + // Speed up the logging frequency, so that the test doesn't have to wait too long to check for log messages. + Settings.builder().put(SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(200)).build() + ); + final String originalNodeId = internalCluster().getInstance(NodeEnvironment.class, originalNode).nodeId(); + final var indexName = randomIdentifier(); createIndexWithContent(indexName, indexSettings(1, 0).put(REQUIRE_NODE_NAME_SETTING, originalNode).build()); @@ -88,6 +125,16 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { }); addUnassignedShardsWatcher(clusterService, indexName); + // Ensure that the SnapshotShutdownProgressTracker does not start logging in RESTART mode. 
+ mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "SnapshotShutdownProgressTracker start log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.DEBUG, + "Starting shutdown snapshot progress logging on node [" + originalNodeId + "]" + ) + ); + safeAwait( (ActionListener listener) -> putShutdownMetadata( clusterService, @@ -100,9 +147,15 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { ) ); assertFalse(snapshotCompletesWithoutPausingListener.isDone()); + + // Verify no SnapshotShutdownProgressTracker logging in RESTART mode. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + unblockAllDataNodes(repoName); // lets the shard snapshot continue so the snapshot can succeed assertEquals(SnapshotState.SUCCESS, snapshotFuture.get(10, TimeUnit.SECONDS).getSnapshotInfo().state()); safeAwait(snapshotCompletesWithoutPausingListener); + clearShutdownMetadata(clusterService); } @@ -117,7 +170,7 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode); - final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName); + final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName, 1); addUnassignedShardsWatcher(clusterService, indexName); updateIndexSettings(Settings.builder().putNull(REQUIRE_NODE_NAME_SETTING), indexName); @@ -146,7 +199,7 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, originalNode); - final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, 
indexName); + final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName, 1); addUnassignedShardsWatcher(clusterService, indexName); final var snapshotStatusUpdateBarrier = new CyclicBarrier(2); @@ -264,7 +317,7 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, nodeForRemoval); - final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName); + final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName, 1); addUnassignedShardsWatcher(clusterService, indexName); waitForBlock(otherNode, repoName); @@ -320,7 +373,7 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, primaryNode); - final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName); + final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName, 1); addUnassignedShardsWatcher(clusterService, indexName); putShutdownForRemovalMetadata(primaryNode, clusterService); @@ -334,6 +387,9 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { assertEquals(SnapshotState.SUCCESS, snapshotFuture.get(10, TimeUnit.SECONDS).getSnapshotInfo().state()); } + /** + * Tests that deleting a snapshot will abort paused shard snapshots on a node with shutdown metadata. 
+ */ public void testAbortSnapshotWhileRemovingNode() throws Exception { final var primaryNode = internalCluster().startDataOnlyNode(); final var indexName = randomIdentifier(); @@ -363,7 +419,7 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); addUnassignedShardsWatcher(clusterService, indexName); putShutdownForRemovalMetadata(primaryNode, clusterService); - unblockAllDataNodes(repoName); // lets the shard snapshot abort, but allocation filtering stops it from moving + unblockAllDataNodes(repoName); // lets the shard snapshot pause, but allocation filtering stops it from moving safeAwait(updateSnapshotStatusBarrier); // wait for data node to notify master that the shard snapshot is paused // abort snapshot (and wait for the abort to land in the cluster state) @@ -414,10 +470,180 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { clearShutdownMetadata(clusterService); } + /** + * This test exercises the SnapshotShutdownProgressTracker's log messages reporting the progress of shard snapshots on data nodes. + */ + @TestLogging( + value = "org.elasticsearch.snapshots.SnapshotShutdownProgressTracker:TRACE", + reason = "Testing SnapshotShutdownProgressTracker's progress, which is reported at the TRACE logging level" + ) + public void testSnapshotShutdownProgressTracker() throws Exception { + final var repoName = randomIdentifier(); + final int numShards = randomIntBetween(1, 10); + createRepository(repoName, "mock"); + + // Create another index on another node which will be blocked (remain in state INIT) throughout. + // Not required for this test, just adds some more concurrency. 
+ final var otherNode = internalCluster().startDataOnlyNode(); + final var otherIndex = randomIdentifier(); + createIndexWithContent(otherIndex, indexSettings(numShards, 0).put(REQUIRE_NODE_NAME_SETTING, otherNode).build()); + blockDataNode(repoName, otherNode); + + final var nodeForRemoval = internalCluster().startDataOnlyNode( + // Speed up the logging frequency, so that the test doesn't have to wait too long to check for log messages. + Settings.builder().put(SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(200)).build() + ); + final String nodeForRemovalId = internalCluster().getInstance(NodeEnvironment.class, nodeForRemoval).nodeId(); + final var indexName = randomIdentifier(); + createIndexWithContent(indexName, indexSettings(numShards, 0).put(REQUIRE_NODE_NAME_SETTING, nodeForRemoval).build()); + indexAllShardsToAnEqualOrGreaterMinimumSize(indexName, new ByteSizeValue(2, ByteSizeUnit.KB).getBytes()); + + // Start the snapshot with blocking in place on the data node not to allow shard snapshots to finish yet. 
+ final var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); + final var snapshotFuture = startFullSnapshotBlockedOnDataNode(randomIdentifier(), repoName, nodeForRemoval); + final var snapshotPausedListener = createSnapshotPausedListener(clusterService, repoName, indexName, numShards); + addUnassignedShardsWatcher(clusterService, indexName); + + waitForBlock(otherNode, repoName); + + logger.info("---> nodeForRemovalId: " + nodeForRemovalId + ", numShards: " + numShards); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker start log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.DEBUG, + "Starting shutdown snapshot progress logging on node [" + nodeForRemovalId + "]" + ) + ); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker pause set log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.DEBUG, + "Pause signals have been set for all shard snapshots on data node [" + nodeForRemovalId + "]" + ) + ); + + putShutdownForRemovalMetadata(nodeForRemoval, clusterService); + + // Check that the SnapshotShutdownProgressTracker was turned on after the shutdown metadata is set above. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker running number of snapshots", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.INFO, + "*Number shard snapshots running [" + numShards + "].*" + ) + ); + + // Check that the SnapshotShutdownProgressTracker is tracking the active (not yet paused) shard snapshots. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + + // Block on the master when a shard snapshot request comes in, until we can verify that the Tracker saw the outgoing request. 
+ final CountDownLatch snapshotStatusUpdateLatch = new CountDownLatch(1); + final var masterTransportService = MockTransportService.getInstance(internalCluster().getMasterName()); + masterTransportService.addRequestHandlingBehavior( + SnapshotsService.UPDATE_SNAPSHOT_STATUS_ACTION_NAME, + (handler, request, channel, task) -> masterTransportService.getThreadPool().generic().execute(() -> { + safeAwait(snapshotStatusUpdateLatch); + try { + handler.messageReceived(request, channel, task); + } catch (Exception e) { + fail(e); + } + }) + ); + + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker shard snapshot has paused log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.INFO, + "*Number shard snapshots waiting for master node reply to status update request [" + numShards + "]*" + ) + ); + + // Let the shard snapshot proceed. It will still get stuck waiting for the master node to respond. + unblockNode(repoName, nodeForRemoval); + + // Check that the SnapshotShutdownProgressTracker observed the request sent to the master node. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker shard snapshot has paused log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.INFO, + "Current active shard snapshot stats on data node [" + nodeForRemovalId + "]*Paused [" + numShards + "]" + ) + ); + + // Release the master node to respond + snapshotStatusUpdateLatch.countDown(); + + // Wait for the snapshot to fully pause. + safeAwait(snapshotPausedListener); + + // Check that the SnapshotShutdownProgressTracker observed the shard snapshot finishing as paused. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + + // Remove the allocation filter so that the shard moves off of the node shutting down. 
+ updateIndexSettings(Settings.builder().putNull(REQUIRE_NODE_NAME_SETTING), indexName); + + // Wait for the shard snapshot to succeed on the non-shutting down node. + safeAwait( + ClusterServiceUtils.addTemporaryStateListener( + clusterService, + state -> SnapshotsInProgress.get(state) + .asStream() + .allMatch( + e -> e.shards() + .entrySet() + .stream() + .anyMatch( + shardEntry -> shardEntry.getKey().getIndexName().equals(indexName) + && switch (shardEntry.getValue().state()) { + case INIT, PAUSED_FOR_NODE_REMOVAL -> false; + case SUCCESS -> true; + case FAILED, ABORTED, MISSING, QUEUED, WAITING -> throw new AssertionError(shardEntry.toString()); + } + ) + ) + ) + ); + + unblockAllDataNodes(repoName); + + // Snapshot completes when the node vacates even though it hasn't been removed yet + assertEquals(SnapshotState.SUCCESS, snapshotFuture.get(10, TimeUnit.SECONDS).getSnapshotInfo().state()); + + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "SnapshotShutdownProgressTracker cancelled log message", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.DEBUG, + "Cancelling shutdown snapshot progress logging on node [" + nodeForRemovalId + "]" + ) + ); + + clearShutdownMetadata(clusterService); + + // Check that the SnapshotShutdownProgressTracker logging was cancelled by the removal of the shutdown metadata. + mockLog.awaitAllExpectationsMatched(); + resetMockLog(); + } + private static SubscribableListener createSnapshotPausedListener( ClusterService clusterService, String repoName, - String indexName + String indexName, + int numShards ) { return ClusterServiceUtils.addTemporaryStateListener(clusterService, state -> { final var entriesForRepo = SnapshotsInProgress.get(state).forRepo(repoName); @@ -434,10 +660,17 @@ public class SnapshotShutdownIT extends AbstractSnapshotIntegTestCase { .stream() .flatMap(e -> e.getKey().getIndexName().equals(indexName) ? 
Stream.of(e.getValue()) : Stream.of()) .toList(); - assertThat(shardSnapshotStatuses, hasSize(1)); - final var shardState = shardSnapshotStatuses.iterator().next().state(); - assertThat(shardState, oneOf(SnapshotsInProgress.ShardState.INIT, SnapshotsInProgress.ShardState.PAUSED_FOR_NODE_REMOVAL)); - return shardState == SnapshotsInProgress.ShardState.PAUSED_FOR_NODE_REMOVAL; + assertThat(shardSnapshotStatuses, hasSize(numShards)); + for (var shardStatus : shardSnapshotStatuses) { + assertThat( + shardStatus.state(), + oneOf(SnapshotsInProgress.ShardState.INIT, SnapshotsInProgress.ShardState.PAUSED_FOR_NODE_REMOVAL) + ); + if (shardStatus.state() == SnapshotsInProgress.ShardState.INIT) { + return false; + } + } + return true; }); } diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index a7ae17c8dac1..9477f9c6a5cc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -628,6 +628,11 @@ public class DiscoveryNodes implements Iterable, SimpleDiffable listener) { abortListeners.addListener(listener); } @@ -429,4 +433,31 @@ public class IndexShardSnapshotStatus { + ')'; } } + + @Override + public String toString() { + return "index shard snapshot status (" + + "stage=" + + stage + + ", startTime=" + + startTime + + ", totalTime=" + + totalTime + + ", incrementalFileCount=" + + incrementalFileCount + + ", totalFileCount=" + + totalFileCount + + ", processedFileCount=" + + processedFileCount + + ", incrementalSize=" + + incrementalSize + + ", totalSize=" + + totalSize + + ", processedSize=" + + processedSize + + ", failure='" + + failure + + '\'' + + ')'; + } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index abc5f36eef7d..7b2066f24377 
100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -21,6 +21,8 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress.ShardSnapshotStatus; import org.elasticsearch.cluster.SnapshotsInProgress.ShardState; +import org.elasticsearch.cluster.metadata.NodesShutdownMetadata; +import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; @@ -82,6 +84,8 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl private final ThreadPool threadPool; + private final SnapshotShutdownProgressTracker snapshotShutdownProgressTracker; + private final Map> shardSnapshots = new HashMap<>(); // A map of snapshots to the shardIds that we already reported to the master as failed @@ -102,6 +106,11 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl this.transportService = transportService; this.clusterService = clusterService; this.threadPool = transportService.getThreadPool(); + this.snapshotShutdownProgressTracker = new SnapshotShutdownProgressTracker( + () -> clusterService.state().nodes().getLocalNodeId(), + clusterService.getClusterSettings(), + threadPool + ); this.remoteFailedRequestDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); if (DiscoveryNode.canContainData(settings)) { // this is only useful on the nodes that can hold data @@ -130,11 +139,38 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl @Override public void clusterChanged(ClusterChangedEvent event) { try { + final var localNodeId = clusterService.localNode().getId(); + + // Track when this node enters and leaves shutdown 
mode because we pause shard snapshots for shutdown. + // The snapshotShutdownProgressTracker will report (via logging) on the progress shard snapshots make + // towards either completing (successfully or otherwise) or pausing. + NodesShutdownMetadata currentShutdownMetadata = event.state().metadata().custom(NodesShutdownMetadata.TYPE); + NodesShutdownMetadata previousShutdownMetadata = event.previousState().metadata().custom(NodesShutdownMetadata.TYPE); + SingleNodeShutdownMetadata currentLocalNodeShutdownMetadata = currentShutdownMetadata != null + ? currentShutdownMetadata.get(localNodeId) + : null; + SingleNodeShutdownMetadata previousLocalNodeShutdownMetadata = previousShutdownMetadata != null + ? previousShutdownMetadata.get(localNodeId) + : null; + + boolean isLocalNodeAddingShutdown = false; + if (isPausingProgressTrackedShutdown(previousLocalNodeShutdownMetadata) == false + && isPausingProgressTrackedShutdown(currentLocalNodeShutdownMetadata)) { + snapshotShutdownProgressTracker.onClusterStateAddShutdown(); + isLocalNodeAddingShutdown = true; + } else if (isPausingProgressTrackedShutdown(previousLocalNodeShutdownMetadata) + && isPausingProgressTrackedShutdown(currentLocalNodeShutdownMetadata) == false) { + snapshotShutdownProgressTracker.onClusterStateRemoveShutdown(); + } + final var currentSnapshots = SnapshotsInProgress.get(event.state()); + if (SnapshotsInProgress.get(event.previousState()).equals(currentSnapshots) == false) { - final var localNodeId = clusterService.localNode().getId(); synchronized (shardSnapshots) { + // Cancel any snapshots that have been removed from the cluster state. cancelRemoved(currentSnapshots); + + // Update running snapshots or start any snapshots that are set to run. 
for (final var oneRepoSnapshotsInProgress : currentSnapshots.entriesByRepo()) { for (final var snapshotsInProgressEntry : oneRepoSnapshotsInProgress) { handleUpdatedSnapshotsInProgressEntry( @@ -147,6 +183,11 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl } } + if (isLocalNodeAddingShutdown) { + // Any active snapshots would have been signalled to pause in the previous code block. + snapshotShutdownProgressTracker.onClusterStatePausingSetForAllShardSnapshots(); + } + String previousMasterNodeId = event.previousState().nodes().getMasterNodeId(); String currentMasterNodeId = event.state().nodes().getMasterNodeId(); if (currentMasterNodeId != null && currentMasterNodeId.equals(previousMasterNodeId) == false) { @@ -164,6 +205,17 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl } } + /** + * Determines whether we want to track this kind of shutdown for snapshot pausing progress. + * We want tracking is shutdown metadata is set, and not type RESTART. + * Note that the Shutdown API is idempotent and the type of shutdown may change to / from RESTART to / from some other type of interest. + * + * @return true if snapshots will be paused during this type of local node shutdown. + */ + private static boolean isPausingProgressTrackedShutdown(@Nullable SingleNodeShutdownMetadata localNodeShutdownMetadata) { + return localNodeShutdownMetadata != null && localNodeShutdownMetadata.getType() != SingleNodeShutdownMetadata.Type.RESTART; + } + @Override public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { // abort any snapshots occurring on the soon-to-be closed shard @@ -231,6 +283,9 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl } } + /** + * Starts new snapshots and pauses or aborts active shard snapshot based on the updated {@link SnapshotsInProgress} entry. 
+ */ private void handleUpdatedSnapshotsInProgressEntry(String localNodeId, boolean removingLocalNode, SnapshotsInProgress.Entry entry) { if (entry.isClone()) { // This is a snapshot clone, it will be executed on the current master @@ -364,8 +419,7 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl final IndexVersion entryVersion, final long entryStartTime ) { - // separate method to make sure this lambda doesn't capture any heavy local objects like a SnapshotsInProgress.Entry - return () -> snapshot(shardId, snapshot, indexId, snapshotStatus, entryVersion, entryStartTime, new ActionListener<>() { + ActionListener snapshotResultListener = new ActionListener<>() { @Override public void onResponse(ShardSnapshotResult shardSnapshotResult) { final ShardGeneration newGeneration = shardSnapshotResult.getGeneration(); @@ -405,7 +459,15 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl final var shardState = snapshotStatus.moveToUnsuccessful(nextStage, failure, threadPool.absoluteTimeInMillis()); notifyUnsuccessfulSnapshotShard(snapshot, shardId, shardState, failure, snapshotStatus.generation()); } + }; + + snapshotShutdownProgressTracker.incNumberOfShardSnapshotsInProgress(shardId, snapshot); + var decTrackerRunsBeforeResultListener = ActionListener.runAfter(snapshotResultListener, () -> { + snapshotShutdownProgressTracker.decNumberOfShardSnapshotsInProgress(shardId, snapshot, snapshotStatus); }); + + // separate method to make sure this lambda doesn't capture any heavy local objects like a SnapshotsInProgress.Entry + return () -> snapshot(shardId, snapshot, indexId, snapshotStatus, entryVersion, entryStartTime, decTrackerRunsBeforeResultListener); } // package private for testing @@ -665,19 +727,25 @@ public final class SnapshotShardsService extends AbstractLifecycleComponent impl /** Updates the shard snapshot status by sending a {@link UpdateIndexShardSnapshotStatusRequest} to the master node */ private 
void sendSnapshotShardUpdate(final Snapshot snapshot, final ShardId shardId, final ShardSnapshotStatus status) { + ActionListener updateResultListener = new ActionListener<>() { + @Override + public void onResponse(Void aVoid) { + logger.trace("[{}][{}] updated snapshot state to [{}]", shardId, snapshot, status); + } + + @Override + public void onFailure(Exception e) { + logger.warn(() -> format("[%s][%s] failed to update snapshot state to [%s]", shardId, snapshot, status), e); + } + }; + snapshotShutdownProgressTracker.trackRequestSentToMaster(snapshot, shardId); + var releaseTrackerRequestRunsBeforeResultListener = ActionListener.runBefore(updateResultListener, () -> { + snapshotShutdownProgressTracker.releaseRequestSentToMaster(snapshot, shardId); + }); + remoteFailedRequestDeduplicator.executeOnce( new UpdateIndexShardSnapshotStatusRequest(snapshot, shardId, status), - new ActionListener<>() { - @Override - public void onResponse(Void aVoid) { - logger.trace("[{}][{}] updated snapshot state to [{}]", shardId, snapshot, status); - } - - @Override - public void onFailure(Exception e) { - logger.warn(() -> format("[%s][%s] failed to update snapshot state to [%s]", shardId, snapshot, status), e); - } - }, + releaseTrackerRequestRunsBeforeResultListener, (req, reqListener) -> transportService.sendRequest( transportService.getLocalNode(), SnapshotsService.UPDATE_SNAPSHOT_STATUS_ACTION_NAME, diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTracker.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTracker.java new file mode 100644 index 000000000000..5d81e3c4e46a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTracker.java @@ -0,0 +1,270 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.snapshots; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ResultDeduplicator; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; + +/** + * Tracks progress of shard snapshots during shutdown, on this single data node. Periodically reports progress via logging, the interval for + * which see {@link #SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING}. + */ +public class SnapshotShutdownProgressTracker { + + /** How frequently shard snapshot progress is logged after receiving local node shutdown metadata. 
*/ + public static final Setting SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING = Setting.timeSetting( + "snapshots.shutdown.progress.interval", + TimeValue.timeValueSeconds(5), + TimeValue.MINUS_ONE, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + private static final Logger logger = LogManager.getLogger(SnapshotShutdownProgressTracker.class); + + private final Supplier getLocalNodeId; + private final ThreadPool threadPool; + + private volatile TimeValue progressLoggerInterval; + private Scheduler.Cancellable scheduledProgressLoggerFuture; + + /** + * The time at which the cluster state update began that found a shutdown signal for this node. Negative value means unset (node is not + * shutting down). + */ + private volatile long shutdownStartMillis = -1; + + /** + * The time at which the cluster state finished setting shard snapshot states to PAUSING, which the shard snapshot operations will + * discover asynchronously. Negative value means unset (node is not shutting down) + */ + private volatile long shutdownFinishedSignallingPausingMillis = -1; + + /** + * Tracks the number of shard snapshots that have started on the data node but not yet finished. + */ + private final AtomicLong numberOfShardSnapshotsInProgressOnDataNode = new AtomicLong(); + + /** + * The logic to track shard snapshot status update requests to master can result in duplicate requests (see + * {@link ResultDeduplicator}), as well as resending requests if the elected master changes. + * Tracking specific requests uniquely by snapshot ID + shard ID de-duplicates requests for tracking. + * Also tracks the absolute start time of registration, to report duration on de-registration. + */ + private final Map shardSnapshotRequests = ConcurrentCollections.newConcurrentMap(); + + /** + * Track how the shard snapshots reach completion during shutdown: did they fail, succeed or pause? 
+ */ + private final AtomicLong doneCount = new AtomicLong(); + private final AtomicLong failureCount = new AtomicLong(); + private final AtomicLong abortedCount = new AtomicLong(); + private final AtomicLong pausedCount = new AtomicLong(); + + public SnapshotShutdownProgressTracker(Supplier localNodeIdSupplier, ClusterSettings clusterSettings, ThreadPool threadPool) { + this.getLocalNodeId = localNodeIdSupplier; + clusterSettings.initializeAndWatch( + SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING, + value -> this.progressLoggerInterval = value + ); + this.threadPool = threadPool; + } + + private void scheduleProgressLogger() { + if (progressLoggerInterval.millis() > 0) { + scheduledProgressLoggerFuture = threadPool.scheduleWithFixedDelay( + this::logProgressReport, + progressLoggerInterval, + threadPool.executor(ThreadPool.Names.GENERIC) + ); + logger.debug( + () -> Strings.format( + "Starting shutdown snapshot progress logging on node [%s], runs every [%s]", + getLocalNodeId.get(), + progressLoggerInterval + ) + ); + } else { + logger.debug("Snapshot progress logging during shutdown is disabled"); + } + } + + private void cancelProgressLogger() { + assert scheduledProgressLoggerFuture != null : "Somehow shutdown mode was removed before it was added."; + scheduledProgressLoggerFuture.cancel(); + if (progressLoggerInterval.millis() > 0) { + // Only log cancellation if it was most likely started. Theoretically the interval setting could be updated during shutdown, + // such that the progress logger is already running and ignores the new value, but that does not currently happen. + logger.debug(() -> Strings.format("Cancelling shutdown snapshot progress logging on node [%s]", getLocalNodeId.get())); + } + } + + /** + * Logs some statistics about shard snapshot progress. + */ + private void logProgressReport() { + logger.info( + """ + Current active shard snapshot stats on data node [{}]. \ + Node shutdown cluster state update received at [{}]. 
\ + Finished signalling shard snapshots to pause at [{}]. \ + Number shard snapshots running [{}]. \ + Number shard snapshots waiting for master node reply to status update request [{}] \ + Shard snapshot completion stats since shutdown began: Done [{}]; Failed [{}]; Aborted [{}]; Paused [{}]\ + """, + getLocalNodeId.get(), + shutdownStartMillis, + shutdownFinishedSignallingPausingMillis, + numberOfShardSnapshotsInProgressOnDataNode.get(), + shardSnapshotRequests.size(), + doneCount.get(), + failureCount.get(), + abortedCount.get(), + pausedCount.get() + ); + } + + /** + * Called as soon as a node shutdown signal is received. + */ + public void onClusterStateAddShutdown() { + assert this.shutdownStartMillis == -1 : "Expected not to be tracking anything. Call shutdown remove before adding shutdown again"; + + // Reset these values when a new shutdown occurs, to minimize/eliminate chances of racing if shutdown is later removed and async + // shard snapshots updates continue to occur. + doneCount.set(0); + failureCount.set(0); + abortedCount.set(0); + pausedCount.set(0); + + // Track the timestamp of shutdown signal, on which to base periodic progress logging. + this.shutdownStartMillis = threadPool.relativeTimeInMillis(); + + // Start logging periodic progress reports. + scheduleProgressLogger(); + } + + /** + * Called when the cluster state update processing a shutdown signal has finished signalling (setting PAUSING) all shard snapshots to + * pause. 
+ */ + public void onClusterStatePausingSetForAllShardSnapshots() { + assert this.shutdownStartMillis != -1 + : "Should not have left shutdown mode before finishing processing the cluster state update with shutdown"; + this.shutdownFinishedSignallingPausingMillis = threadPool.relativeTimeInMillis(); + logger.debug(() -> Strings.format("Pause signals have been set for all shard snapshots on data node [%s]", getLocalNodeId.get())); + } + + /** + * The cluster state indicating that a node is to be shutdown may be cleared instead of following through with node shutdown. In that + * case, no further shutdown shard snapshot progress reporting is desired. + */ + public void onClusterStateRemoveShutdown() { + assert shutdownStartMillis != -1 : "Expected a call to add shutdown mode before a call to remove shutdown mode."; + + // Reset the shutdown specific trackers. + this.shutdownStartMillis = -1; + this.shutdownFinishedSignallingPausingMillis = -1; + + // Turn off the progress logger, which we only want to run during shutdown. + cancelProgressLogger(); + } + + /** + * Tracks how many shard snapshots are started. + */ + public void incNumberOfShardSnapshotsInProgress(ShardId shardId, Snapshot snapshot) { + logger.debug(() -> Strings.format("Started shard (shard ID: [%s]) in snapshot ([%s])", shardId, snapshot)); + numberOfShardSnapshotsInProgressOnDataNode.incrementAndGet(); + } + + /** + * Tracks how many shard snapshots have finished since shutdown mode began. 
+ */ + public void decNumberOfShardSnapshotsInProgress(ShardId shardId, Snapshot snapshot, IndexShardSnapshotStatus shardSnapshotStatus) { + logger.debug( + () -> Strings.format( + "Finished shard (shard ID: [%s]) in snapshot ([%s]) with status ([%s]): ", + shardId, + snapshot, + shardSnapshotStatus.toString() + ) + ); + + numberOfShardSnapshotsInProgressOnDataNode.decrementAndGet(); + if (shutdownStartMillis != -1) { + switch (shardSnapshotStatus.getStage()) { + case DONE -> doneCount.incrementAndGet(); + case FAILURE -> failureCount.incrementAndGet(); + case ABORTED -> abortedCount.incrementAndGet(); + case PAUSED -> pausedCount.incrementAndGet(); + // The other stages are active, we should only see the end result because this method is called upon completion. + default -> { + assert false : "unexpected shard snapshot stage transition during shutdown: " + shardSnapshotStatus.getStage(); + } + } + } + } + + /** + * Uniquely tracks a request to update a shard snapshot status sent to the master node. Idempotent, safe to call multiple times. + * + * @param snapshot first part of a unique tracking identifier + * @param shardId second part of a unique tracking identifier + */ + public void trackRequestSentToMaster(Snapshot snapshot, ShardId shardId) { + logger.debug(() -> Strings.format("Tracking shard (shard ID: [%s]) snapshot ([%s]) request to master", shardId, snapshot)); + shardSnapshotRequests.put(snapshot.toString() + shardId.getIndexName() + shardId.getId(), threadPool.relativeTimeInNanos()); + } + + /** + * Stops tracking a request to update a shard snapshot status sent to the master node. Idempotent, safe to call multiple times. 
+ * + * @param snapshot first part of a unique tracking identifier + * @param shardId second part of a unique tracking identifier + */ + public void releaseRequestSentToMaster(Snapshot snapshot, ShardId shardId) { + var masterRequestStartTime = shardSnapshotRequests.remove(snapshot.toString() + shardId.getIndexName() + shardId.getId()); + // This method is may be called multiple times. Only log if this is the first time, and the entry hasn't already been removed. + if (masterRequestStartTime != null) { + logger.debug( + () -> Strings.format( + "Finished shard (shard ID: [%s]) snapshot ([%s]) update request to master in [%s]", + shardId, + snapshot, + new TimeValue(threadPool.relativeTimeInNanos() - masterRequestStartTime.longValue(), TimeUnit.NANOSECONDS) + ) + ); + } + } + + // Test only + void assertStatsForTesting(long done, long failure, long aborted, long paused) { + assert doneCount.get() == done : "doneCount is " + doneCount.get() + ", expected count was " + done; + assert failureCount.get() == failure : "failureCount is " + doneCount.get() + ", expected count was " + failure; + assert abortedCount.get() == aborted : "abortedCount is " + doneCount.get() + ", expected count was " + aborted; + assert pausedCount.get() == paused : "pausedCount is " + doneCount.get() + ", expected count was " + paused; + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTrackerTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTrackerTests.java new file mode 100644 index 000000000000..fbf742ae2ea5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotShutdownProgressTrackerTests.java @@ -0,0 +1,407 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.snapshots; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.coordination.Coordinator; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; +import org.elasticsearch.repositories.ShardGeneration; +import org.elasticsearch.repositories.ShardSnapshotResult; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.Before; + +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; + +public class SnapshotShutdownProgressTrackerTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(SnapshotShutdownProgressTrackerTests.class); + + final Settings settings = Settings.builder() + .put( + SnapshotShutdownProgressTracker.SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING.getKey(), + TimeValue.timeValueMillis(500) + ) + .build(); + final Settings disabledTrackerLoggingSettings = Settings.builder() 
+ .put(SnapshotShutdownProgressTracker.SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING.getKey(), TimeValue.MINUS_ONE) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + + DeterministicTaskQueue deterministicTaskQueue; + + // Construction parameters for the Tracker. + ThreadPool testThreadPool; + private final Supplier getLocalNodeIdSupplier = () -> "local-node-id-for-test"; + private final BiConsumer, Consumer> addSettingsUpdateConsumerNoOp = (setting, updateMethod) -> {}; + + // Set up some dummy shard snapshot information to feed the Tracker. + private final ShardId dummyShardId = new ShardId(new Index("index-name-for-test", "index-uuid-for-test"), 0); + private final Snapshot dummySnapshot = new Snapshot( + "snapshot-repo-name-for-test", + new SnapshotId("snapshot-name-for-test", "snapshot-uuid-for-test") + ); + Function dummyShardSnapshotStatusSupplier = (stage) -> { + var shardGen = new ShardGeneration("shard-gen-string-for-test"); + IndexShardSnapshotStatus newStatus = IndexShardSnapshotStatus.newInitializing(new ShardGeneration("shard-gen-string-for-test")); + switch (stage) { + case DONE -> { + newStatus.moveToStarted(0L, 1, 10, 2L, 20L); + newStatus.moveToFinalize(); + newStatus.moveToDone(10L, new ShardSnapshotResult(shardGen, ByteSizeValue.MINUS_ONE, 2)); + } + case ABORTED -> newStatus.abortIfNotCompleted("snapshot-aborted-for-test", (listener) -> {}); + case FAILURE -> newStatus.moveToFailed(300, "shard-snapshot-failure-string for-test"); + case PAUSED -> { + newStatus.pauseIfNotCompleted((listener) -> {}); + newStatus.moveToUnsuccessful(IndexShardSnapshotStatus.Stage.PAUSED, "shard-paused-string-for-test", 100L); + } + default -> newStatus.pauseIfNotCompleted((listener) -> {}); + } + return newStatus; + }; + + @Before + public void setUpThreadPool() { + deterministicTaskQueue = new DeterministicTaskQueue(); + testThreadPool = deterministicTaskQueue.getThreadPool(); + } + + 
/** + * Increments the tracker's shard snapshot completion stats. Evenly adds to each type of {@link IndexShardSnapshotStatus.Stage} stat + * supported by the tracker. + */ + void simulateShardSnapshotsCompleting(SnapshotShutdownProgressTracker tracker, int numShardSnapshots) { + for (int i = 0; i < numShardSnapshots; ++i) { + tracker.incNumberOfShardSnapshotsInProgress(dummyShardId, dummySnapshot); + IndexShardSnapshotStatus status; + switch (i % 4) { + case 0 -> status = dummyShardSnapshotStatusSupplier.apply(IndexShardSnapshotStatus.Stage.DONE); + case 1 -> status = dummyShardSnapshotStatusSupplier.apply(IndexShardSnapshotStatus.Stage.ABORTED); + case 2 -> status = dummyShardSnapshotStatusSupplier.apply(IndexShardSnapshotStatus.Stage.FAILURE); + case 3 -> status = dummyShardSnapshotStatusSupplier.apply(IndexShardSnapshotStatus.Stage.PAUSED); + // decNumberOfShardSnapshotsInProgress will throw an assertion if this value is ever set. + default -> status = dummyShardSnapshotStatusSupplier.apply(IndexShardSnapshotStatus.Stage.PAUSING); + } + logger.info("---> Generated shard snapshot status in stage (" + status.getStage() + ") for switch case (" + (i % 4) + ")"); + tracker.decNumberOfShardSnapshotsInProgress(dummyShardId, dummySnapshot, status); + } + } + + public void testTrackerLogsStats() { + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettings, + testThreadPool + ); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "unset shard snapshot completion stats", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.INFO, + "*snapshots to pause [-1]*Done [0]; Failed [0]; Aborted [0]; Paused [0]*" + ) + ); + + // Simulate starting shutdown -- should reset the completion stats and start logging + tracker.onClusterStateAddShutdown(); + + // Wait for the initial progress log 
message with no shard snapshot completions. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "shard snapshot completed stats", + SnapshotShutdownProgressTracker.class.getCanonicalName(), + Level.INFO, + "*Shard snapshot completion stats since shutdown began: Done [2]; Failed [1]; Aborted [1]; Paused [1]*" + ) + ); + + // Simulate updating the shard snapshot completion stats. + simulateShardSnapshotsCompleting(tracker, 5); + tracker.assertStatsForTesting(2, 1, 1, 1); + + // Wait for the next periodic log message to include the new completion stats. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + } + + /** + * Test that {@link SnapshotShutdownProgressTracker#SNAPSHOT_PROGRESS_DURING_SHUTDOWN_LOG_INTERVAL_SETTING} can be disabled by setting + * a value of {@link TimeValue#MINUS_ONE}. This will disable progress logging, though the Tracker will continue to track things. 
+ */ + @TestLogging( + value = "org.elasticsearch.snapshots.SnapshotShutdownProgressTracker:DEBUG", + reason = "Test checks for DEBUG-level log message" + ) + public void testTrackerProgressLoggingIntervalSettingCanBeDisabled() { + ClusterSettings clusterSettingsDisabledLogging = new ClusterSettings( + disabledTrackerLoggingSettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettingsDisabledLogging, + testThreadPool + ); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "disabled logging message", + SnapshotShutdownProgressTracker.class.getName(), + Level.DEBUG, + "Snapshot progress logging during shutdown is disabled" + ) + ); + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "no progress logging message", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "Current active shard snapshot stats on data node*" + ) + ); + + // Simulate starting shutdown -- no logging will start because the Tracker logging is disabled. + tracker.onClusterStateAddShutdown(); + tracker.onClusterStatePausingSetForAllShardSnapshots(); + + // Wait for the logging disabled message. 
+ deterministicTaskQueue.runAllTasks(); + mockLog.awaitAllExpectationsMatched(); + } + } + + @TestLogging( + value = "org.elasticsearch.snapshots.SnapshotShutdownProgressTracker:DEBUG", + reason = "Test checks for DEBUG-level log message" + ) + public void testTrackerIntervalSettingDynamically() { + ClusterSettings clusterSettingsDisabledLogging = new ClusterSettings( + disabledTrackerLoggingSettings, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS + ); + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettingsDisabledLogging, + testThreadPool + ); + // Re-enable the progress logging + clusterSettingsDisabledLogging.applySettings(settings); + + // Check that the logging is active. + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "disabled logging message", + SnapshotShutdownProgressTracker.class.getName(), + Level.DEBUG, + "Snapshot progress logging during shutdown is disabled" + ) + ); + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "progress logging message", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "Current active shard snapshot stats on data node*" + ) + ); + + // Simulate starting shutdown -- progress logging should begin. 
+ tracker.onClusterStateAddShutdown(); + tracker.onClusterStatePausingSetForAllShardSnapshots(); + + // Wait for the progress logging message + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + } + + public void testTrackerPauseTimestamp() { + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettings, + testThreadPool + ); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "pausing timestamp should be set", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*Finished signalling shard snapshots to pause at [" + testThreadPool.relativeTimeInMillis() + "]*" + ) + ); + + // Simulate starting shutdown -- start logging. + tracker.onClusterStateAddShutdown(); + + // Set a pausing complete timestamp. + tracker.onClusterStatePausingSetForAllShardSnapshots(); + + // Wait for the first log message to ensure the pausing timestamp was set. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + } + + public void testTrackerRequestsToMaster() { + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettings, + testThreadPool + ); + Snapshot snapshot = new Snapshot("repositoryName", new SnapshotId("snapshotName", "snapshotUUID")); + ShardId shardId = new ShardId(new Index("indexName", "indexUUID"), 0); + + // Simulate starting shutdown -- start logging. + tracker.onClusterStateAddShutdown(); + + // Set a pausing complete timestamp. 
+ tracker.onClusterStatePausingSetForAllShardSnapshots(); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "one master status update request", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*master node reply to status update request [1]*" + ) + ); + + tracker.trackRequestSentToMaster(snapshot, shardId); + + // Wait for the first log message to ensure the pausing timestamp was set. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "no master status update requests", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*master node reply to status update request [0]*" + ) + ); + + tracker.releaseRequestSentToMaster(snapshot, shardId); + + // Wait for the first log message to ensure the pausing timestamp was set. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + } + + public void testTrackerClearShutdown() { + SnapshotShutdownProgressTracker tracker = new SnapshotShutdownProgressTracker( + getLocalNodeIdSupplier, + clusterSettings, + testThreadPool + ); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.UnseenEventExpectation( + "pausing timestamp should be unset", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*Finished signalling shard snapshots to pause at [-1]*" + ) + ); + + // Simulate starting shutdown -- start logging. + tracker.onClusterStateAddShutdown(); + + // Set a pausing complete timestamp. 
+ tracker.onClusterStatePausingSetForAllShardSnapshots(); + + // Wait for the first log message to ensure the pausing timestamp was set. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "logging completed shard snapshot stats", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*Done [2]; Failed [2]; Aborted [2]; Paused [1]*" + ) + ); + + // Simulate updating the shard snapshot completion stats. + simulateShardSnapshotsCompleting(tracker, 7); + tracker.assertStatsForTesting(2, 2, 2, 1); + + // Wait for the first log message to ensure the pausing timestamp was set. + deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + + // Clear start and pause timestamps + tracker.onClusterStateRemoveShutdown(); + + try (var mockLog = MockLog.capture(Coordinator.class, SnapshotShutdownProgressTracker.class)) { + mockLog.addExpectation( + new MockLog.SeenEventExpectation( + "completed shard snapshot stats are reset", + SnapshotShutdownProgressTracker.class.getName(), + Level.INFO, + "*Done [0]; Failed [0]; Aborted [0]; Paused [0]" + ) + ); + + // Start logging again and check that the pause timestamp was reset from the last time. + tracker.onClusterStateAddShutdown(); + + // Wait for the first log message to ensure the pausing timestamp was set. 
+ deterministicTaskQueue.advanceTime(); + deterministicTaskQueue.runAllRunnableTasks(); + mockLog.awaitAllExpectationsMatched(); + } + + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 5a40816c94be..87a834d6424b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -45,6 +45,7 @@ import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; +import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; @@ -1545,6 +1546,37 @@ public abstract class ESIntegTestCase extends ESTestCase { return prepareIndex(index).setId(id).setSource(source).get(); } + /** + * Runs random indexing until each shard in the given index is at least minBytesPerShard in size. + * Force merges all cluster shards down to one segment, and then invokes refresh to ensure all shard data is visible for readers, + * before returning. + * + * @return The final {@link ShardStats} for all shards of the index. 
+ */ + protected ShardStats[] indexAllShardsToAnEqualOrGreaterMinimumSize(final String indexName, long minBytesPerShard) { + while (true) { + indexRandom(false, indexName, scaledRandomIntBetween(100, 10000)); + forceMerge(); + refresh(); + + final ShardStats[] shardStats = indicesAdmin().prepareStats(indexName) + .clear() + .setStore(true) + .setTranslog(true) + .get() + .getShards(); + + var smallestShardSize = Arrays.stream(shardStats) + .mapToLong(it -> it.getStats().getStore().sizeInBytes()) + .min() + .orElseThrow(() -> new AssertionError("no shards")); + + if (smallestShardSize >= minBytesPerShard) { + return shardStats; + } + } + } + /** * Syntactic sugar for: *

From a21ae458fa2abc554aef74f74fad2ec1d8856418 Mon Sep 17 00:00:00 2001
From: Brendan Cully 
Date: Tue, 8 Oct 2024 14:17:33 -0700
Subject: [PATCH 67/85] Unmute `SearchSlowLogTests` (#114364)

These were fixed in #114344
---
 muted-tests.yml | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index 88379d4533a5..91f26674373d 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -362,12 +362,6 @@ tests:
 - class: org.elasticsearch.xpack.inference.services.openai.OpenAiServiceTests
   method: testInfer_StreamRequest
   issue: https://github.com/elastic/elasticsearch/issues/114232
-- class: org.elasticsearch.index.SearchSlowLogTests
-  method: testLevelPrecedence
-  issue: https://github.com/elastic/elasticsearch/issues/114300
-- class: org.elasticsearch.index.SearchSlowLogTests
-  method: testTwoLoggersDifferentLevel
-  issue: https://github.com/elastic/elasticsearch/issues/114301
 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests
   method: testInfer_StreamRequest_ErrorResponse
   issue: https://github.com/elastic/elasticsearch/issues/114327

From 1a8f50608c981425a2b4bceba6e60d06c8836975 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine
 <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 9 Oct 2024 08:37:48 +1100
Subject: [PATCH 68/85] Mute
 org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT test
 {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search} #114371

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 91f26674373d..5ab607fab209 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -368,6 +368,9 @@ tests:
 - class: org.elasticsearch.xpack.rank.rrf.RRFRankClientYamlTestSuiteIT
   method: test {yaml=rrf/700_rrf_retriever_search_api_compatibility/rrf retriever with top-level collapse}
   issue: https://github.com/elastic/elasticsearch/issues/114331
+- class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
+  method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search}
+  issue: https://github.com/elastic/elasticsearch/issues/114371
 
 # Examples:
 #

From bee1d912c47f7404079d40786ca23599b36b3019 Mon Sep 17 00:00:00 2001
From: Nick Tindall 
Date: Wed, 9 Oct 2024 09:15:59 +1100
Subject: [PATCH 69/85] Use RestStatus.isSuccessful to deduplicate status code
 checks (#114279)

---
 .../AlibabaCloudSearchResponseHandler.java                   | 3 ++-
 .../external/anthropic/AnthropicResponseHandler.java         | 4 ++--
 .../inference/external/cohere/CohereResponseHandler.java     | 4 ++--
 .../elastic/ElasticInferenceServiceResponseHandler.java      | 4 ++--
 .../googleaistudio/GoogleAiStudioResponseHandler.java        | 4 ++--
 .../googlevertexai/GoogleVertexAiResponseHandler.java        | 4 ++--
 .../xpack/inference/external/http/HttpResult.java            | 5 +++++
 .../external/huggingface/HuggingFaceResponseHandler.java     | 4 ++--
 .../external/ibmwatsonx/IbmWatsonxResponseHandler.java       | 4 ++--
 .../inference/external/openai/OpenAiResponseHandler.java     | 4 ++--
 .../external/request/ibmwatsonx/IbmWatsonxRequest.java       | 2 +-
 .../response/AzureMistralOpenAiExternalResponseHandler.java  | 4 ++--
 .../email/attachment/HttpEmailAttachementParser.java         | 3 ++-
 .../xpack/watcher/notification/slack/SentMessages.java       | 3 ++-
 14 files changed, 30 insertions(+), 22 deletions(-)

diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/alibabacloudsearch/AlibabaCloudSearchResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/alibabacloudsearch/AlibabaCloudSearchResponseHandler.java
index 05d51372d9cd..ecfa988b5035 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/alibabacloudsearch/AlibabaCloudSearchResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/alibabacloudsearch/AlibabaCloudSearchResponseHandler.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.inference.external.alibabacloudsearch;
 
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
 import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler;
 import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser;
@@ -43,7 +44,7 @@ public class AlibabaCloudSearchResponseHandler extends BaseResponseHandler {
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
         int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (RestStatus.isSuccessful(statusCode)) {
             return;
         }
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java
index cab2c655b9ff..aec47f19b264 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/anthropic/AnthropicResponseHandler.java
@@ -61,12 +61,12 @@ public class AnthropicResponseHandler extends BaseResponseHandler {
      * @throws RetryException Throws if status code is {@code >= 300 or < 200 }
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 529) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java
index 3579cd4100bb..ac2e1747f805 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/cohere/CohereResponseHandler.java
@@ -74,12 +74,12 @@ public class CohereResponseHandler extends BaseResponseHandler {
      * @throws RetryException Throws if status code is {@code >= 300 or < 200 }
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode > 500) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java
index 15e543fadad7..2b79afb3b56f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/elastic/ElasticInferenceServiceResponseHandler.java
@@ -33,11 +33,11 @@ public class ElasticInferenceServiceResponseHandler extends BaseResponseHandler
     }
 
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 400) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java
index 1138cfcb7cdc..4ba5b552f802 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googleaistudio/GoogleAiStudioResponseHandler.java
@@ -43,12 +43,12 @@ public class GoogleAiStudioResponseHandler extends BaseResponseHandler {
      * @throws RetryException Throws if status code is {@code >= 300 or < 200 }
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 503) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googlevertexai/GoogleVertexAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googlevertexai/GoogleVertexAiResponseHandler.java
index 872bf51f3662..6b1aef9856d3 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googlevertexai/GoogleVertexAiResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/googlevertexai/GoogleVertexAiResponseHandler.java
@@ -35,12 +35,12 @@ public class GoogleVertexAiResponseHandler extends BaseResponseHandler {
     }
 
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 503) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java
index 6c79daa2dedc..68a94ac0b0c0 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpResult.java
@@ -10,6 +10,7 @@ package org.elasticsearch.xpack.inference.external.http;
 import org.apache.http.HttpResponse;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.core.Streams;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.inference.common.SizeLimitInputStream;
 
 import java.io.ByteArrayOutputStream;
@@ -47,4 +48,8 @@ public record HttpResult(HttpResponse response, byte[] body) {
     public boolean isBodyEmpty() {
         return body().length == 0;
     }
+
+    public boolean isSuccessfulResponse() {
+        return RestStatus.isSuccessful(response.getStatusLine().getStatusCode());
+    }
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java
index 59804b37e465..f6fd9afabe28 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java
@@ -41,11 +41,11 @@ public class HuggingFaceResponseHandler extends BaseResponseHandler {
      * @throws RetryException thrown if status code is {@code >= 300 or < 200}
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 503 || statusCode == 502 || statusCode == 429) {
             throw new RetryException(true, buildError(RATE_LIMIT, request, result));
         } else if (statusCode >= 500) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java
index 161ca6966cec..cb686ddb654d 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/ibmwatsonx/IbmWatsonxResponseHandler.java
@@ -42,11 +42,11 @@ public class IbmWatsonxResponseHandler extends BaseResponseHandler {
      * @throws RetryException thrown if status code is {@code >= 300 or < 200}
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 404) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java
index c193280e1978..6404236d5118 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java
@@ -67,12 +67,12 @@ public class OpenAiResponseHandler extends BaseResponseHandler {
      * @throws RetryException Throws if status code is {@code >= 300 or < 200 }
      */
     void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw new RetryException(true, buildError(SERVER_ERROR, request, result));
         } else if (statusCode == 503) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRequest.java
index e5ac64624a69..d9dd6c4c8b44 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRequest.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/ibmwatsonx/IbmWatsonxRequest.java
@@ -78,7 +78,7 @@ public interface IbmWatsonxRequest extends Request {
 
     static void validateResponse(String bearerTokenGenUrl, String inferenceId, HttpResponse response) {
         int statusCode = response.getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (RestStatus.isSuccessful(statusCode)) {
             return;
         }
 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/AzureMistralOpenAiExternalResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/AzureMistralOpenAiExternalResponseHandler.java
index e4e96ca644c7..3116bf0f6cd2 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/AzureMistralOpenAiExternalResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/AzureMistralOpenAiExternalResponseHandler.java
@@ -59,12 +59,12 @@ public class AzureMistralOpenAiExternalResponseHandler extends BaseResponseHandler {
     }
 
     public void checkForFailureStatusCode(Request request, HttpResult result) throws RetryException {
-        int statusCode = result.response().getStatusLine().getStatusCode();
-        if (statusCode >= 200 && statusCode < 300) {
+        if (result.isSuccessfulResponse()) {
             return;
         }
 
         // handle error codes
+        int statusCode = result.response().getStatusLine().getStatusCode();
         if (statusCode == 500) {
             throw handle500Error(request, result);
         } else if (statusCode == 503) {
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java
index e4d7fcc3a293..19b9f68ae961 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/attachment/HttpEmailAttachementParser.java
@@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext;
@@ -84,7 +84,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpRequestAttachment> {
-        if (response.status() >= 200 && response.status() < 300) {
+        if (RestStatus.isSuccessful(response.status())) {
             if (response.hasContent()) {
                 String contentType = attachment.getContentType();
                 String attachmentContentType = Strings.hasLength(contentType) ? contentType : response.contentType();
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SentMessages.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SentMessages.java
index 94d17844f06d..f98bb9952586 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SentMessages.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SentMessages.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.notification.slack;
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.core.Nullable;
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -110,7 +110,7 @@ public class SentMessages implements ToXContentObject, Iterable<SentMessages.SentMessage> {
-            return response != null && response.status() >= 200 && response.status() < 300;
+            return response != null && RestStatus.isSuccessful(response.status());
         }
 
         @Override

From 07e6932405a75aa9062d56fb5f3751965de1d5d4 Mon Sep 17 00:00:00 2001
From: Joe Gallo 
Date: Tue, 8 Oct 2024 19:48:05 -0300
Subject: [PATCH 70/85] IPinfo geolocation support (#114311)

---
 .../elasticsearch/ingest/geoip/Database.java  |  14 ++-
 .../ingest/geoip/IpinfoIpDataLookups.java     | 103 ++++++++++++++++++
 .../geoip/IpinfoIpDataLookupsTests.java       |  91 +++++++++++++++-
 .../ingest/geoip/MaxMindSupportTests.java     |   2 +-
 .../ipinfo/ip_geolocation_sample.mmdb         | Bin 0 -> 33552 bytes
 5 files changed, 205 insertions(+), 5 deletions(-)
 create mode 100644 modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb

diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
index 128c16e16376..4c2f047c3570 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java
@@ -169,7 +169,19 @@ enum Database {
             Property.TYPE
         ),
         Set.of(Property.IP, Property.ASN, Property.ORGANIZATION_NAME, Property.NETWORK)
-    );
+    ),
+    CityV2(
+        Set.of(
+            Property.IP,
+            Property.COUNTRY_ISO_CODE,
+            Property.REGION_NAME,
+            Property.CITY_NAME,
+            Property.TIMEZONE,
+            Property.LOCATION,
+            Property.POSTAL_CODE
+        ),
+        Set.of(Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION)
+    ),;
 
     private final Set<Property> properties;
     private final Set<Property> defaultProperties;
diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java
index ac7f56468f37..d2c734cb9bae 100644
--- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java
+++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookups.java
@@ -58,6 +58,25 @@ final class IpinfoIpDataLookups {
         }
     }
 
+    /**
+     * Lax-ly parses a string that contains a double into a Double (or null, if such parsing isn't possible).
+     * @param latlon a potentially empty (or null) string that is expected to contain a parsable double
+     * @return the parsed double
+     */
+    static Double parseLocationDouble(final String latlon) {
+        if (latlon == null || Strings.hasText(latlon) == false) {
+            return null;
+        } else {
+            String stripped = latlon.trim();
+            try {
+                return Double.parseDouble(stripped);
+            } catch (NumberFormatException e) {
+                logger.trace("Unable to parse non-compliant location string [{}]", latlon);
+                return null;
+            }
+        }
+    }
+
     public record AsnResult(
         Long asn,
         @Nullable String country, // not present in the free asn database
@@ -88,6 +107,31 @@ final class IpinfoIpDataLookups {
         public CountryResult {}
     }
 
+    public record GeolocationResult(
+        String city,
+        String country,
+        Double latitude,
+        Double longitude,
+        String postalCode,
+        String region,
+        String timezone
+    ) {
+        @SuppressWarnings("checkstyle:RedundantModifier")
+        @MaxMindDbConstructor
+        public GeolocationResult(
+            @MaxMindDbParameter(name = "city") String city,
+            @MaxMindDbParameter(name = "country") String country,
+            @MaxMindDbParameter(name = "latitude") String latitude,
+            @MaxMindDbParameter(name = "longitude") String longitude,
+            // @MaxMindDbParameter(name = "network") String network, // for now we're not exposing this
+            @MaxMindDbParameter(name = "postal_code") String postalCode,
+            @MaxMindDbParameter(name = "region") String region,
+            @MaxMindDbParameter(name = "timezone") String timezone
+        ) {
+            this(city, country, parseLocationDouble(latitude), parseLocationDouble(longitude), postalCode, region, timezone);
+        }
+    }
+
     static class Asn extends AbstractBase<AsnResult> {
         Asn(Set<Database.Property> properties) {
             super(properties, AsnResult.class);
@@ -183,6 +227,65 @@ final class IpinfoIpDataLookups {
         }
     }
 
+    static class Geolocation extends AbstractBase<GeolocationResult> {
+        Geolocation(final Set<Database.Property> properties) {
+            super(properties, GeolocationResult.class);
+        }
+
+        @Override
+        protected Map<String, Object> transform(final Result<GeolocationResult> result) {
+            GeolocationResult response = result.result;
+
+            Map<String, Object> data = new HashMap<>();
+            for (Database.Property property : this.properties) {
+                switch (property) {
+                    case IP -> data.put("ip", result.ip);
+                    case COUNTRY_ISO_CODE -> {
+                        String countryIsoCode = response.country;
+                        if (countryIsoCode != null) {
+                            data.put("country_iso_code", countryIsoCode);
+                        }
+                    }
+                    case REGION_NAME -> {
+                        String subdivisionName = response.region;
+                        if (subdivisionName != null) {
+                            data.put("region_name", subdivisionName);
+                        }
+                    }
+                    case CITY_NAME -> {
+                        String cityName = response.city;
+                        if (cityName != null) {
+                            data.put("city_name", cityName);
+                        }
+                    }
+                    case TIMEZONE -> {
+                        String locationTimeZone = response.timezone;
+                        if (locationTimeZone != null) {
+                            data.put("timezone", locationTimeZone);
+                        }
+                    }
+                    case POSTAL_CODE -> {
+                        String postalCode = response.postalCode;
+                        if (postalCode != null) {
+                            data.put("postal_code", postalCode);
+                        }
+                    }
+                    case LOCATION -> {
+                        Double latitude = response.latitude;
+                        Double longitude = response.longitude;
+                        if (latitude != null && longitude != null) {
+                            Map<String, Object> locationObject = new HashMap<>();
+                            locationObject.put("lat", latitude);
+                            locationObject.put("lon", longitude);
+                            data.put("location", locationObject);
+                        }
+                    }
+                }
+            }
+            return data;
+        }
+    }
+
     /**
      * Just a little record holder -- there's the data that we receive via the binding to our record objects from the Reader via the
      * getRecord call, but then we also need to capture the passed-in ip address that came from the caller as well as the network for
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
index 5689693d6c29..f58f8819e7ed 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IpinfoIpDataLookupsTests.java
@@ -38,6 +38,7 @@ import java.util.function.BiConsumer;
 import static java.util.Map.entry;
 import static org.elasticsearch.ingest.geoip.GeoIpTestUtils.copyDatabase;
 import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseAsn;
+import static org.elasticsearch.ingest.geoip.IpinfoIpDataLookups.parseLocationDouble;
 import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -72,6 +73,10 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
         // the second ASN variant database is like a specialization of the ASN database
         assertThat(Sets.difference(Database.Asn.properties(), Database.AsnV2.properties()), is(empty()));
         assertThat(Database.Asn.defaultProperties(), equalTo(Database.AsnV2.defaultProperties()));
+
+        // the second City variant database is like a version of the ordinary City database but lacking many fields
+        assertThat(Sets.difference(Database.CityV2.properties(), Database.City.properties()), is(empty()));
+        assertThat(Sets.difference(Database.CityV2.defaultProperties(), Database.City.defaultProperties()), is(empty()));
     }
 
     public void testParseAsn() {
@@ -88,6 +93,18 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
         assertThat(parseAsn("anythingelse"), nullValue());
     }
 
+    public void testParseLocationDouble() {
+        // expected case: "123.45" is 123.45
+        assertThat(parseLocationDouble("123.45"), equalTo(123.45));
+        // defensive cases: null and empty becomes null, this is not expected fwiw
+        assertThat(parseLocationDouble(null), nullValue());
+        assertThat(parseLocationDouble(""), nullValue());
+        // defensive cases: we strip whitespace
+        assertThat(parseLocationDouble("  -123.45  "), equalTo(-123.45));
+        // bottom case: a non-parsable string is null
+        assertThat(parseLocationDouble("anythingelse"), nullValue());
+    }
+
     public void testAsn() throws IOException {
         assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS);
         Path configDir = tmpDir;
@@ -100,7 +117,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
 
         // this is the 'free' ASN database (sample)
         try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_asn_sample.mmdb")) {
-            IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values()));
+            IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Database.AsnV2.properties());
             Map<String, Object> data = lookup.getData(loader, "5.182.109.0");
             assertThat(
                 data,
@@ -118,7 +135,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
 
         // this is the non-free or 'standard' ASN database (sample)
         try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("asn_sample.mmdb")) {
-            IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Set.of(Database.Property.values()));
+            IpDataLookup lookup = new IpinfoIpDataLookups.Asn(Database.AsnV2.properties());
             Map<String, Object> data = lookup.getData(loader, "23.53.116.0");
             assertThat(
                 data,
@@ -185,7 +202,7 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
 
         // this is the 'free' Country database (sample)
         try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_country_sample.mmdb")) {
-            IpDataLookup lookup = new IpinfoIpDataLookups.Country(Set.of(Database.Property.values()));
+            IpDataLookup lookup = new IpinfoIpDataLookups.Country(Database.Country.properties());
             Map data = lookup.getData(loader, "4.221.143.168");
             assertThat(
                 data,
@@ -202,6 +219,74 @@ public class IpinfoIpDataLookupsTests extends ESTestCase {
         }
     }
 
+    public void testGeolocation() throws IOException {
+        assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS);
+        Path configDir = tmpDir;
+        copyDatabase("ipinfo/ip_geolocation_sample.mmdb", configDir.resolve("ip_geolocation_sample.mmdb"));
+
+        GeoIpCache cache = new GeoIpCache(1000); // real cache to test purging of entries upon a reload
+        ConfigDatabases configDatabases = new ConfigDatabases(configDir, cache);
+        configDatabases.initialize(resourceWatcherService);
+
+        // this is the non-free or 'standard' Geolocation database (sample)
+        try (DatabaseReaderLazyLoader loader = configDatabases.getDatabase("ip_geolocation_sample.mmdb")) {
+            IpDataLookup lookup = new IpinfoIpDataLookups.Geolocation(Database.CityV2.properties());
+            Map data = lookup.getData(loader, "2.124.90.182");
+            assertThat(
+                data,
+                equalTo(
+                    Map.ofEntries(
+                        entry("ip", "2.124.90.182"),
+                        entry("country_iso_code", "GB"),
+                        entry("region_name", "England"),
+                        entry("city_name", "London"),
+                        entry("timezone", "Europe/London"),
+                        entry("postal_code", "E1W"),
+                        entry("location", Map.of("lat", 51.50853, "lon", -0.12574))
+                    )
+                )
+            );
+        }
+    }
+
+    public void testGeolocationInvariants() {
+        assumeFalse("https://github.com/elastic/elasticsearch/issues/114266", Constants.WINDOWS);
+        Path configDir = tmpDir;
+        copyDatabase("ipinfo/ip_geolocation_sample.mmdb", configDir.resolve("ip_geolocation_sample.mmdb"));
+
+        {
+            final Set expectedColumns = Set.of(
+                "network",
+                "city",
+                "region",
+                "country",
+                "postal_code",
+                "timezone",
+                "latitude",
+                "longitude"
+            );
+
+            Path databasePath = configDir.resolve("ip_geolocation_sample.mmdb");
+            assertDatabaseInvariants(databasePath, (ip, row) -> {
+                assertThat(row.keySet(), equalTo(expectedColumns));
+                {
+                    String latitude = (String) row.get("latitude");
+                    assertThat(latitude, equalTo(latitude.trim()));
+                    Double parsed = parseLocationDouble(latitude);
+                    assertThat(parsed, notNullValue());
+                    assertThat(latitude, equalTo(Double.toString(parsed))); // reverse it
+                }
+                {
+                    String longitude = (String) row.get("longitude");
+                    assertThat(longitude, equalTo(longitude.trim()));
+                    Double parsed = parseLocationDouble(longitude);
+                    assertThat(parsed, notNullValue());
+                    assertThat(longitude, equalTo(Double.toString(parsed))); // reverse it
+                }
+            });
+        }
+    }
+
     private static void assertDatabaseInvariants(final Path databasePath, final BiConsumer> rowConsumer) {
         try (Reader reader = new Reader(pathToFile(databasePath))) {
             Networks networks = reader.networks(Map.class);
diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
index 1e05cf2b3ba3..d377a9b97fcc 100644
--- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
+++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java
@@ -361,7 +361,7 @@ public class MaxMindSupportTests extends ESTestCase {
 
     private static final Set> KNOWN_UNSUPPORTED_RESPONSE_CLASSES = Set.of(IpRiskResponse.class);
 
-    private static final Set KNOWN_UNSUPPORTED_DATABASE_VARIANTS = Set.of(Database.AsnV2);
+    private static final Set KNOWN_UNSUPPORTED_DATABASE_VARIANTS = Set.of(Database.AsnV2, Database.CityV2);
 
     public void testMaxMindSupport() {
         for (Database databaseType : Database.values()) {
diff --git a/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb b/modules/ingest-geoip/src/test/resources/ipinfo/ip_geolocation_sample.mmdb
new file mode 100644
index 0000000000000000000000000000000000000000..ed738bdde145082d329a40b878618bc5f4595932
GIT binary patch
literal 33552
zcmb`N2Y6IP_r^CNfrQ?BS$Zg0wl_r$q!(HOh=_5MY?6g!7dHt_MX~qZE1-yq1;wt|
z0L5Ohi(&;E_Fn$)%)FaD@Hd~|_dMb={?2*Nxik09otZoLZY&l{ipA3LH;cuRO186D
z(){q#=%#30dFNeQE_($`&W1t@^^p(_)
zvsgNDp6Yg2A@2l{cOv|ggnu%RI|cfwLO%`u>B2vQd1pdDi}u+fehzf>wQln~p`TCf
z0`fxgBJyJL5*&Xi$`$l4BQH1Xv>LH1sa++st8u-E>2|M0&I>5;E%#Ad1ADEpbCgg{(<~IMf@*gr$AQqroiZOP%@sEQa%*B);
zC>X;jLs2jnQ-+~jVB#rO*uzoEQAVHyQARR0O4K)++8D&g(#{g`an!~mK7sZ`GF#-?
z5T8WdE_4SqC+R|-Tf{x^y^Q%tKN+xCy0UK)OgRAg2a4k-!{_}?Iauh2K+h5W6xKNv
zdals(;O8?|AoN1$MZzzJUn2Zc9#ck6Lwq{QOq3Zo??pyeOqrFm^_Z-Hwh(1DkDWu#
zHPktWQk#do!-PH`{^7!30RIT)Rto({Y9Yj{gkDX(h72QKE8=zV>xCb|wd`Q7x|U;6
z8t6BYQ8H$z&wnwsqsTapYogsuwvbE6rH1F%OpJAxa>rCipQ9oPg=TJMBJdZrzV(G@~)W=^){UY*W
z)OQK(OAU42Wz;T5{0gC8N&PD5SJS>m#IJ?En)(`{ucdY!;@1m(9rYW?_2iA@P2|lu
zZiBJ)`Q2jdZm&bXjXAfA_(p1XAbuz9O(K35wYw3&N9gxbzmL2h@ds!>DDobnwi)q<
zX+I+3k3xS;_>VJZ3-qnV?#}t1@&xwDW1&CEob9lGM0rXa|1|t(P)$(+AXhb5_}roe}w>PkhMirP*i)5-Q^2jpeY&NS5d
z9iexk-dX5fES4TI;det`cj5QoF+Is%i1()5hwN*p+v$gRf9eB-J`nmK>VwH4B0iMb
zFyvWj4<|>EBgs+ZXhYrZ7+eSDfwr?y$9$A=^vA=VN_zr15n8sf^)WW;lSn(_4iR_4
zcQNJ`x(B*f_&)f4<^_Zvq;>$}2MT>M{DT-fm^?(pbEr+RSbE|*)Vb>O&Bbwf!q10a
zApAo3MZz!UaV2Ca;$^g_k<$%z{WGY|BxfPNT*NEj&t`0n(C0!wl=?jKuw=aFmDCPL
z-U5^*^pAjDN&84LWT?-l3h`>{HDp-CYZ0%bUN7_r^o7(H3B7?@BN;^==RxXXk#`jO
z9dkc5j?!dw?Ki`3N&5O6mx|+-A->%BsVjtjG__;MW0AK~#E+wXJoHsUKLP%U!as?5
zCzGcjek$$LMBeGp&!B!Ld6r=>bx+T>STxTu)YoyI#nNsMV&|jWz+)~TFNAgx?TZcd
zF_%!g6!FW1emV6kpeOGARU&>h^lOBFE%R5CYsj_8yN>qtu-6&8*IhVo6nzfsq5p((
zBgz((n~bhwH^bk+yjz5RE4ACm+sTc{zk~Lj!?&n?P
z2Z(=2`y=vWL!I{t;-6CAO=1kU*7F(Sp9}vB_+K*bE1~bD_BHtp`7QFlqy0VXAB^2w
zwfkez*5~pw=8w9jU%)8%D|iryYiidW{GIjuLH=o|kNwNo?JUSmN$TxVsUx8B+NF`{
zWP7p$Vj0HPd71D#GS-RYIBM5L|V5cC-d6%q1Kn|hrIqG
zJ^=ne#s&#}Fts7%P~;7xZ54ULsf|E&OH0=8s{TR8JJc^7P>V9mZ){J^vXfGj`io9jimXj-xe>Cl5$YTw4y(?CR@8}?KEc`C|k>Zg&XLpy`^nTGoKv#6bo_&Gv9m->0o&!>HXh+jzU
zBE&Bi`X%r$W$ZHYa`FoDO7bd0eI8fixNC%et+D&v0eub1eLQ9@c^!E@xsJR6x$BLs
z>%EcsP2|mpZ=ijP$h#H#ZPafU`bKK%Jnj_wChB)VzuRKz-48JcKA;T|7qqu1N~W{KL`JL
z#$FKmi?|j!K3_)OE6jV9+(EuZzD~YjsL$g~9QT&+-)7D`au<&Oz}ULJ
z52=3y{bQkjg7f_X+NUVHjjnusOk#e2j-Sp4QB%2J!tVfnCFJQU2iVx
z;W?(|p}u_h=)1H66wJM}Lgp2b#bgO$QQD=j%VPL{3Lpgdb*3E%Z8}*He#>3lU$0(n!C-
za3K0l6)N|Ipn$IdF1)z1>}X~MdZcgB_!s&KG(|(wS76x
z^9t%$l2?&elh=^flB>xz}qgypg<#yqVlU-a_6=-bUU|ZY1v@?<6;o
zcae9K_mKCJ_mTIL50DR%50RV6hsj6CN6E*?$H^_^R&pEp1o-P3FHvzKy&;X}?3h3vDOu_YC#1??c~3
z{R8qt5&sDBkA?pU{7;3y8~z?1_Zj&)`Gv^)5_w-y-z)U5seOa^x3s?_zbAh%)aUyn
z@_w>d26YC1CVwG+C4VD-C;u>1$E)LXJ%3>>w0Lx#SH`*_ue;EDQ11!77s>#X-Z2SNQ#?_cv5|s-zD@8I;u0
z2g4sC{Gp-_tI<_|qz^|KVf^%wi;&kqyOE5NF+*Md
zV&okq{5bq3;Wt|>Lx%CVC5WAYvXuTZayhxeP}h4j@{XZ?EV+_Ajy#@RWvGukf!c}W
zNvP*!+NX%TQ>mRso^CkgM&zF<^3G!XZ0P3*{apB%ue#0ig?<6_3x$6X{EL})37Ht5
zmx;W~k#_~-R|@?qYFAqo=jE
zn`v(_)cP&dZbkeyq2Eq@BlJ6rJv4FrCdTd}?Z9;zn`jf)n4*x0UJxxACJ}dH`L*Dbke}OqKk}o0tGVNDH
z-mB1eP=AelUBus@_NK)$%t`xg@*VPBLw&tFsl7+OkNS6s_y_Pm6#hrd`*
zCijq^8R~XEN8T64ABKCZwXZCe4wzr*d%+RZzJ~ve#nS#y`rpF;j`sKD59E*JPvpNbAG@xQ_EPycuLf6!(dI`1!|wYMNQg?6f;innivk|z9g=HP(#9cX8enPf+@
z6WN*ULUuLO^>;%(-KqBwdQWP-$lhchXzl!1&7GSux1hCYP)P;!`v
zTM-{FeBPJ#Bbhr2@zEka2L4#avdD2FJ|6K2!k-8~TlhBklX#q+bdVgs`n+A#+=kY3
zah#W$kMu)}paf86q6Ceuk3RtZfy|ps9z-52@(w{>4)rPIR5DlOY{>Ck5wdwAkr&tj~ctRQC_>f`57n@b*wdgh7vVescOb~w2}#E+m>
ziM%6eheW&zdbRLt;D?!4OV*L~$-Lp}{w$=vD5nSCYqxyyK~@vRFo>(>@W$Y(+VV{>kJiR*op&+vE}?!Yc^P>*Vrx*Y5cyX!
zb`|uig?N95g$y!)u%FZ2hfJxD%8ZbtsYv>!3l?L7+pF_bMRk6SDw
zIW`p&cKdBS?g`|4fbt~$?V_Hipg&Fh8S+{3Ir4e(1@cAmCGutR74lU>eV#k0y@qyP
z7y29U-xU5^@ZVS;nxhu&WJI9TUrkeP^gq}_?^Om-o=8tUV^QR`0jAbXO%$lhch)ZdqO
zKSNzlf9L~*KM?*P;SYvCgvSjfhmlrtxS^_V6vj`7ktm0ujADE=IffidW|8B_@#F+@
zqM<&|Y-%>tJBhZPbdXL%o$o@Po4SYel0MQ;2FReHKK=k4e;`T;%4Eh5A`d1H5&1dP
zrXYW+&~xGEF_tg%0&0bb7YV)C*rUVHONCwrf12>88@s~{=rd{0BFo7Na<-wqt~tn`
zOZ`xCUNSx!_fO}|Cl5zH$9;z*s8y0jk|DB+tR`znT#Kqx*ISGF>S))K5pp59h-@Gm
z$tW2!)b%Y!eMeD`3%vBhES52e
zemW7kmry^6JQ><4v`-~ZBTpyKFx1y|CbhH3v&nPFbIJ2i@A<~o$6WybLdGr<`o+c`
z^C;q%qCAUo8Onnwmoxtg@=Ee5@@hj}|25RE#c`{JzJ~f*=-1J{o?J)XK&~fmByS>b
zHq`BGpmqy+D|s7vJGqg(1MS|4axeW&aIxU+UOnLVq0k7U6GY&Nk>z(0)?Hw^Mrx@uz7&V>p)gtiyBEpC?}+UnE~5UnXB6
zUqziiqU#XS5^J$aJ#3p{hTl0}3iow)Qj0j$|jYGuegg
zisQOjELn@e?qmF1LLWFd|(GPbU-
zn0g8HQlXc@pT^j9at4{Gdlvn2vVxp#sOz6YZ7z8z>YYdXFmgV5xS>970ktELUrGB&
zG9>b?I;;D)cI;293}iXbDGFz
zvV~kiF2!-njIHZgZtU?Ks~JZlevF783xB2XkAr`_@K?bIl6
z6Yzry$#SCUtW
z_|?>|F&w{({%RbzhW1+6PoZ3gau2oZVXvcogQ4#4_0Vq={!PrencP6$Lf%T=Mq*55
zY&5p6?+)sBlAFl8$h!?EaPDT@OZ`67b3g3|40U}ELVt+*W}!b!?GeNu75ZcFA7^Zf
z(6>V0Cj2Lu^CY?5Vwu3c)$Kk_{TUqhtT^sD>d%uekS~%iA^&A#>v~?H{wlcx@z-d-
zPQF3DNxntCZK&&c$Ji5o!|^*Af6rpcM7Ve=l>sCch!SCBGxTCx5{4Khpk*{F(g4P_;909`e9U%#DoS
z8UF*mny-J-{|o*hC>E4aM$1egQ=zppwyG~P4JBRp?csM2eg=G0q;orxokYAd;$5hB
z6?!*n-4X9WyQhfvqShPnKD7Ii{mA}?y6pkT8z}ri%oz-Q2<@TdFw#m6Cr6MY4JYy*
zWR5nzZf6X0#*$g6f1HSqr#=DtM4@L>w?Uso+fF)2C+Q;HhPqu3j`LFYk$y5j1`T!o
z0n`p8C*$~oMEqc5XRm;sBlIcor=o;;TrQbM<|9@>yO1n0)a@1{UP8T;EEDl*)TWa&
z$eH9UvYf0yeY1_N>z_k?F7!i%j&ZN!he4k&^uysVVC)F8l01?OkyT_hS!0;}hM}&%
z7WLH$zn-}f=nI9u2!4a`8{tQp8zUEs_)&<*sW*|$WQ)jKg1n{FmkE72HQY~qo<}3^
z7~02*yp`0BL;QG)#fCOASCJ=>Cz2;ooWO%uVE7w$iJ8N
zedPV*1LT9`L*!=iVe%0}bsr}sj(v==$5H>5WIS^#{B6R20{)Z2-wyvN;XiHc%xB1F
z$>)&wJna|A7Y+4zeF^cGh5w4hVjsY~9pr1|>m=u8=9}=tIQ}hgIQTZ$1$@Wk>H6P=
zzti}c?~(75yF}gx)IKCX!f_wd{)GIL+)eH=)a`tR{Lh8|1^h3W_Z7L9{F?lR{FeOA
zP}lc8wI9eIQQuFreK#{;ofTgj{HA_{wMsul72@E?G!MTb~_PIgPl&lJ=uZG
zATvo+W?v4w6WN)>xoBOTQ&*I3%vXMQlpgeZlD){@q{{01`HoT%&VI1ptJ
zUx0Wt_4AmRtYo-F)>Xdet7LOX|?LaKJBqT~uc4|cxr
z3t$)0FA{n&>=OE=WEnY)oKDUlReLj0X3;MvD@YZejWUP+T=GzI9(fo!pH#;kPJ01(
z1Z~xaDt4ry;}I}qv1nGot~Pqd8WC6dwJ0y6)S;Y>Qjena2+Gm47m|y>2HGm8k#-b}
z34Jl`qsTbfL^hKx*&jZ)cJ|Db*wmOdsQ7)o?F?k88;+LXa
zM*ni5UqSmy@+$Ib@*475axQCi}9ZltfYn`qxmZXj$5qr2i`E$_+ir6>JwA6qG~B#27GE^a
z3EyoCK=wPcrBkvv*3=wosH~3Fgs0{?=H|!4brCd_7p-duMQaM1BaPt|v1qs~uO%K^
z9G+y(W4AS>I5*s6EsI5>K$j`A%dPzj-42`AZuhvYdm)&QLYALMfZH0gerjE5{R%bbmxfy6;Re=Z%D@=Z2dS9gH*a`(0ks!5HuXyIqeRROEDcY)(u9htrJAS&-tYFR
znQh0(yRnu`o`;U{xNR7=PLD(NQmcbToNl`pQ@gM<)>sva*DSZLEaA+a6>AKmy{Q#p
zbzk$aKIP*~eF047{A`Q|jF`#u{4N_hSxvTpGrQGsVnM3Rb9nrIjL3q5P(!3z4gJIt
zSQe_m1*xvjX%TbGjU~?Rb778|aqo2c-I&)4ECEb1ta?s2)-AU;dmmRd6!6<|!(zpA
zLrwKq=hV$yIpQbOUofLS7LG=iB_^@UhH>t~jX(nqtaCoMU5`suqaSx6fbr=w4bU}L
z;>U7>1`2ZG5iI4nlGat04WO;UDPhdvB`sX~3UDEgz`{)QGIx!y2I0Ohf!6)?+o~GhkZ_Tyd&VnZE)DJHlT}K
z7*41&>0tGC`JGM&ZgD|D%fe7R)NEZ@n$r{sO)7~sEDAM;aOy=f>UTZ3-deh4Gg_IW
zTe0K*dR%H2IP4x<09T+NMC#e&$8c8j8(kgR>s?7neJExv$5f8ip~;FuTo~?$%Y*;)&~@6}F3c}n7$%4x0~d>k8x6KP
zs;itXhYQ!0TcIy%?1`wjcxo(yaoik><2H=huBvs|@I*yj=r6A=;PpCi8|LXXTR%}<
z=r6T^YX`TeGT-aQO;N+CLM=78lA0Fl%CS9BdtOCs(Q-8zif35+7oe}bel;749C#pk
z16Y*x4b}?}29(cj^WvO^>$6)cF)tn2ey<1jILlIOU0JN}g{~Maps6;HgFGGJ=}STx)`#ba6HS!HS|XSrYM$#Risr`R)mYBMaWv2eW6FcB@dwndrVv{}r&IN|
zI)AUti5um1+uXP@!o&GbojVI>Z(V{aPerd3O^Jt^w%n@vt;>`tv(yd2*!KAXW`}{b
zBY-(>77VQ0xZMs9YIk^0uW+$dD)QlW=sS*?mmjVP<8D}UBe*M(YE1Q+iCsW>C{{Tm
z)Y70ggy_U@Bpy@yPByL>nBnkJ07)IZdiD_V>VcC=1hVgPs6kG{r@$D_pKN(^cB#s-&(avY`raBo7d;}>dlCH_<3A@>>N-xE({x8Y<8WvC&JTr
z#NB%KS?wZ;GcG9$FN;*itW)tWg!;=4Rcj;e2cEHJBH~<~KCB07qWIN=7AqoF8FkZz
zt2aG(;c#LJ$VzW7wwb2>N>#s__A@Yl&^v5y26lx7Si@cFfy=ELZiX7*sSfp0g*O28
z@_`3c_I^GlCfc)OV&+ecHEh|0XZy6opf-)5xvbQS*lAg*>#XCmQtvP|V*`uT02ih@
z3pGpMZ>`Kq-HFRc{VdT*#Zy}v>RUq9Th?HIY5E8&k{^RZZ{7la+zza`s%~uTu|8ta
zb>Xb-p6va6oVDA97f3vI3$b@=sKa(PEBy$ZYJP60p#ekKydYt?V0(`zzFG_MV72Qh
zG1qCwMjY>Acon0GTcP{j=Rr%>SYAbV87eUgRNnNa(4xdcqYx{*H{kN&^#Lop+MD6s
z5oer&zQs!y3i>$^$d)diNf?wKw+l}WFM2ww{RfFbsUA}1ftO{Oi=MTFP}fkk2-NO{j;Tc{t^jMew9eEZ%AvbUXE!#H4Wq
zut=cAuDWuM2QAuz_Mn=e!JywXxt|(F-@9Dul47GC<)*oybUC_6E#On8O~+8g8!&d^
zSilR=DPE`EW)9O$xV>&q(Czd(g4hKlE-ESh%?0V}!*kK$(i`aEe1ldfde3
zOv7tky>%9L!QpK62C}%`Jew0cZ@e)yM4}jl({h?Zb)n^<>_)6=s`K@prV!%=FUop2
z>P?Nyr@N-FKDCguCY(4cZM**e-IZrmh%JlSim0U`dAEyl8r7q;1{*}&j8gMBMLTwM
zMS&Y{N&&BWJ5o)$f=;*JVVb1<4^8IcwqyL3nSM}j)%XOH{8-W2G&2;!-3zPw6Wp^$q{s8v;?t~R0o|QGc5L3|KB+0u*mn}8;;t3&&-Lp
zL_^iMA=ns)YQjzR+*Ov8gyLH_G|E%KCk(eM;7}V3^=xwbv0dEfRPfU22v*nbcPdod
zJQa`KqdyfDRbVrS2eW#f>5Xl{l*q!+;+8~DIIs|58-o|ILLUYMR=oXs!i8?a^xv;}
zs%Y!XPCs!b&_daEzRZXF^`4RygB%=zxUvQi(YVH#Z;43
zEq!jRcut&1!0ikN-L0LlkJx4(xUn#)XIV*Es2)u<)noU}UdZaSmOYTwX(LwJP7mp6
zfcNvDD-_n#pw<3|tIq252j*PoG_0_l)%O{EHE9Y(lQ-C@-aYX;tu74@7QY)`j?DN9
z1aWzoYt?pV(1%slboIV!n-}M_yVR$`SlR4m^y7ZReEJMrZ^PDAc$sZ%!B@CMFXL2v
zUWYmrzuRUHc=Q{&nsusCuQTWj`$IuLcDcz@Nvdt03aWIfv6er*Cel=&=;D&X<=7x^
zT~)PpRZXIOwavjbo33zsy!&(o9yV^L!xaoy%l4CMo9(NYBsK7|I`vGJiWji;zzjqEIoKQXQ6;)4?Up35sf
zGrBN|OtYT-n#FR4?~L}Kcb|?6<$a{Kx}wl)cVY{LI~dFAxgfD&o>mqL
z<6~`0c6I%>4cpFcZC!nf4LHmv9rae?P@iZ;>!Io(wj#Xo=7hA=X6yK62p`>0W2_**
zWl>8^J+aN6y^Yrbx4kM@jTerfJLCz5!il!_5!-5Q|JQ=QeI5=%SJnjmu39(RNmNgv
zt-U#SV*l@J~!yRFSA%+yCQ3z6e|T`pBXXeg))kR6oPucNi=~Me3I}
z7Z!H4wN`yti=9i*5vs0AyyhgO>i=*_g?4=R#RdjXw!*^LqDAhWoj3velp`{g~U&cK3tuCVZ;iL$$~G7>LM|9g3c-@#v%tw
ztKe&6&U2vTsd=xiu(dh!%S%?Dr_D=hR-c{NpY_?pX52w3|r6Aag2^oJdO{YHj0UI=Zqi}ytQ
z(5Ii#?RW6gL0i{^_iTqREUt+{TXp~aHMLcHAQ%c&sYSueVEoQ)gny^@P%wZGn4)$H
z|4QvHeD|mg*CuOE2>(v)u3*6BuHyHNY_I69%hl{jHNQ;s4_tR8J~v
zY_@1CH@r9$Z*B}n@m+gX;uBypj@7lOARbzRcepLL;>66CZS`XSwut7$)Q2ypPdya-
z;l$j*8VnPc3(qXK{HlUgu)L)KE1~}Kot1VI+s{h7
zA1g)LHg<(Oh@S#@;QP3JVK4PdkS
zaYtQrVYd3y5xn53n}Kh*0sYP0+zh<7S7CQw8?Ff@E;=c+dDUL0SFiiHA^B6oq4<_H
z;+KVdd~?D(^$PV%;xyGK=#S!%{(&KR7x1ek{vbe~5uQr;u~qL8hv{B-hHKn-&GNYt
zj|X)tl49FEQ=}iT>Jt+>Xi&2I2h2pnYFuUws7Ch>i0M^R)f%Yr>209i1GNhOG`R-c
zjrs>{#_b)jLp{UWs^1&*`?2pyENj?47@@5zNGzKFZT*3uM}6{1JkR`CHns2%b1Cb;
zt-m(t@r0eNi;o|>1TFlJ{Zm*G!|zNjO=>*mtMQ0Cj)$TX8`A=8tMIWwJ)j5UOB&vC
zu`zb}y+J&u^%su)w7*^R)c0NWh{l(s>DBn#u23|heq-jJE#NbQ{sW$@%yZGc`oNf#
zxf-J&bAuiQA$3!{xJ!5$(4(ML_@}GP`~ssO^E-@!j_uSaXsiAJzMeSoO4(XJg@3wB
z`_-SHi`{_!$x;0w6@FpH+q3#2h+hivyH7yQ`Nlx72Dpr*zlT{kvhMLOo
z;#hTkT54VD_HJc0*qu~`n!=UM%NK_iq(v4-qO~z=WN~F(7;~^1KbXg&l}(|>#SLLw
zV`EKK34SQ3jz<2oWBot0JaBRV

literal 0
HcmV?d00001


From fb482f863d5430702b19bd3dd23e9d8652f12ddd Mon Sep 17 00:00:00 2001
From: Keith Massey 
Date: Tue, 8 Oct 2024 18:15:37 -0500
Subject: [PATCH 71/85] Adding index_template_substitutions to the simulate
 ingest API (#114128)

This adds support for a new `index_template_substitutions` field to the
body of an ingest simulate API request. These substitutions can be used
to change the pipeline(s) used for ingest, or to change the mappings
used for validation. It is similar to the
`component_template_substitutions` added in #113276. Here is an example
that shows both of those usages working together:

```
## First, add a couple of pipelines that set a field to a boolean:
PUT /_ingest/pipeline/foo-pipeline?pretty
{
  "processors": [
    {
      "set": {
        "field": "foo",
        "value": true
      }
    }
  ]
}

PUT /_ingest/pipeline/bar-pipeline?pretty
{
  "processors": [
    {
      "set": {
        "field": "bar",
        "value": true
      }
    }
  ]
}

## Now, create three component templates. One provides a mapping enforces that the only field is "foo"
## and that field is a keyword. The next is similar, but adds a `bar` field. The final one provides a setting
## that makes "foo-pipeline" the default pipeline.
## Remember that the "foo-pipeline" sets the "foo" field to a boolean, so using both of these templates
## together would cause a validation exception. These could be in the same template, but are provided
## separately just so that later we can show how multiple templates can be overridden.
PUT _component_template/mappings_template
{
  "template": {
    "mappings": {
      "dynamic": "strict",
      "properties": {
        "foo": {
          "type": "keyword"
        }
      }
    }
  }
}

PUT _component_template/mappings_template_with_bar
{
    "template": {
      "mappings": {
        "dynamic": "strict",
        "properties": {
          "foo": {
            "type": "keyword"
          },
          "bar": {
            "type": "boolean"
          }
        }
      }
    }
}

PUT _component_template/settings_template
{
  "template": {
    "settings": {
      "index": {
        "default_pipeline": "foo-pipeline"
      }
    }
  }
}

## Here we create an index template  pulling in both of the component templates above
PUT _index_template/template_1
{
  "index_patterns": ["foo*"],
  "composed_of": ["mappings_template", "settings_template"]
}

## We can index a document here to create the index, or not. Either way the simulate call ought to work the same
POST foo-1/_doc
{
  "foo": "FOO"
}

## This will not blow up with validation exceptions because the substitute "index_template_substitutions"
## uses `mappings_template_with_bar`, which adds the bar field.
## And the bar-pipeline is executed rather than the foo-pipeline because the substitute
## "index_template_substitutions" uses a substitute `settings_template`, so the value of "foo"
## does not get set to an invalid type.
POST _ingest/_simulate?pretty&index=foo-1
{
  "docs": [
    {
      "_id": "asdf",
      "_source": {
        "foo": "foo",
        "bar": "bar"
      }
    }
  ],
  "component_template_substitutions": {
    "settings_template": {
      "template": {
        "settings": {
          "index": {
            "default_pipeline": "bar-pipeline"
          }
        }
      }
    }
  },
  "index_template_substitutions": {
    "template_1": {
      "index_patterns": ["foo*"],
      "composed_of": ["mappings_template_with_bar", "settings_template"]
    }
  }
}
```
---
 docs/changelog/114128.yaml                    |   5 +
 .../indices/put-index-template.asciidoc       |  11 +-
 .../ingest/apis/simulate-ingest.asciidoc      |  21 +
 .../test/ingest/80_ingest_simulate.yml        | 413 +++++++++++++++++-
 .../bulk/TransportSimulateBulkActionIT.java   |  64 ++-
 .../org/elasticsearch/TransportVersions.java  |   1 +
 .../action/bulk/BulkFeatures.java             |   8 +-
 .../action/bulk/BulkRequest.java              |   5 +
 .../action/bulk/SimulateBulkRequest.java      |  56 ++-
 .../bulk/TransportAbstractBulkAction.java     |  11 +-
 .../bulk/TransportSimulateBulkAction.java     |  19 +-
 .../MetadataIndexTemplateService.java         |  24 +-
 .../ingest/RestSimulateIngestAction.java      |   3 +-
 .../action/bulk/SimulateBulkRequestTests.java | 115 ++++-
 .../TransportSimulateBulkActionTests.java     |   4 +-
 .../ingest/SimulateIngestServiceTests.java    |   6 +-
 16 files changed, 699 insertions(+), 67 deletions(-)
 create mode 100644 docs/changelog/114128.yaml

diff --git a/docs/changelog/114128.yaml b/docs/changelog/114128.yaml
new file mode 100644
index 000000000000..721649d0d6fe
--- /dev/null
+++ b/docs/changelog/114128.yaml
@@ -0,0 +1,5 @@
+pr: 114128
+summary: Adding `index_template_substitutions` to the simulate ingest API
+area: Ingest Node
+type: enhancement
+issues: []
diff --git a/docs/reference/indices/put-index-template.asciidoc b/docs/reference/indices/put-index-template.asciidoc
index 772bd51afdce..36fc66ecb90b 100644
--- a/docs/reference/indices/put-index-template.asciidoc
+++ b/docs/reference/indices/put-index-template.asciidoc
@@ -85,6 +85,8 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout]
 [[put-index-template-api-request-body]]
 ==== {api-request-body-title}
 
+// tag::request-body[]
+
 `composed_of`::
 (Optional, array of strings)
 An ordered list of component template names. Component templates are merged in the order
@@ -102,7 +104,7 @@ See <>.
 +
 .Properties of `data_stream`
 [%collapsible%open]
-====
+=====
 `allow_custom_routing`::
 (Optional, Boolean) If `true`, the data stream supports
 <>. Defaults to `false`.
@@ -117,7 +119,7 @@ See <>.
 +
 If `time_series`, each backing index has an `index.mode` index setting of
 `time_series`.
-====
+=====
 
 `index_patterns`::
 (Required, array of strings)
@@ -146,7 +148,7 @@ Template to be applied. It may optionally include an `aliases`, `mappings`, or
 +
 .Properties of `template`
 [%collapsible%open]
-====
+=====
 `aliases`::
 (Optional, object of objects) Aliases to add.
 +
@@ -161,7 +163,7 @@ include::{es-ref-dir}/indices/create-index.asciidoc[tag=aliases-props]
 include::{docdir}/rest-api/common-parms.asciidoc[tag=mappings]
 
 include::{docdir}/rest-api/common-parms.asciidoc[tag=settings]
-====
+=====
 
 `version`::
 (Optional, integer)
@@ -174,6 +176,7 @@ Marks this index template as deprecated.
 When creating or updating a non-deprecated index template that uses deprecated components,
 {es} will emit a deprecation warning.
 // end::index-template-api-body[]
+// end::request-body[]
 
 [[put-index-template-api-example]]
 ==== {api-examples-title}
diff --git a/docs/reference/ingest/apis/simulate-ingest.asciidoc b/docs/reference/ingest/apis/simulate-ingest.asciidoc
index ac6da515402b..1bee03ea3e58 100644
--- a/docs/reference/ingest/apis/simulate-ingest.asciidoc
+++ b/docs/reference/ingest/apis/simulate-ingest.asciidoc
@@ -102,6 +102,12 @@ POST /_ingest/_simulate
         }
       }
     }
+  },
+  "index_template_substitutions": { <3>
+    "my-index-template": {
+      "index_patterns": ["my-index-*"],
+      "composed_of": ["component_template_1", "component_template_2"]
+    }
   }
 }
 ----
@@ -109,6 +115,8 @@ POST /_ingest/_simulate
 <1> This replaces the existing `my-pipeline` pipeline with the contents given here for the duration of this request.
 <2> This replaces the existing `my-component-template` component template with the contents given here for the duration of this request.
 These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result.
+<3> This replaces the existing `my-index-template` index template with the contents given here for the duration of this request.
+These templates can be used to change the pipeline(s) used, or to modify the mapping that will be used to validate the result.
 
 [[simulate-ingest-api-request]]
 ==== {api-request-title}
@@ -225,6 +233,19 @@ include::{es-ref-dir}/indices/put-component-template.asciidoc[tag=template]
 
 ====
 
+`index_template_substitutions`::
+(Optional, map of strings to objects)
+Map of index template names to substitute index template definition objects.
++
+.Properties of index template definition objects
+[%collapsible%open]
+
+====
+
+include::{es-ref-dir}/indices/put-index-template.asciidoc[tag=request-body]
+
+====
+
 [[simulate-ingest-api-example]]
 ==== {api-examples-title}
 
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
index b4672b1d8924..18eb401aaa0f 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_ingest_simulate.yml
@@ -371,7 +371,7 @@ setup:
           template:
             settings:
               index:
-                default_pipeline: "foo_pipeline"
+                default_pipeline: "foo-pipeline"
 
   - do:
       allowed_warnings:
@@ -523,7 +523,7 @@ setup:
           template:
             settings:
               index:
-                default_pipeline: "foo_pipeline"
+                default_pipeline: "foo-pipeline"
 
   - do:
       allowed_warnings:
@@ -807,3 +807,412 @@ setup:
   - match: { docs.0.doc._source.foo: "FOO" }
   - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] }
   - not_exists: docs.0.doc.error
+
+---
+"Test ingest simulate with index template substitutions":
+
+  - skip:
+      features:
+        - headers
+        - allowed_warnings
+
+  - requires:
+      cluster_features: ["simulate.index.template.substitutions"]
+      reason: "ingest simulate index template substitutions added in 8.16"
+
+  - do:
+      headers:
+        Content-Type: application/json
+      ingest.put_pipeline:
+        id: "foo-pipeline"
+        body:  >
+          {
+            "processors": [
+              {
+                "set": {
+                  "field": "foo",
+                  "value": true
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      cluster.put_component_template:
+        name: settings_template
+        body:
+          template:
+            settings:
+              index:
+                default_pipeline: "foo-pipeline"
+
+  - do:
+      cluster.put_component_template:
+        name: mappings_template
+        body:
+          template:
+            mappings:
+              dynamic: strict
+              properties:
+                foo:
+                  type: keyword
+
+  - do:
+      allowed_warnings:
+        - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation"
+      indices.put_index_template:
+        name: test-composable-1
+        body:
+          index_patterns:
+            - foo*
+          composed_of:
+            - mappings_template
+
+  - do:
+      headers:
+        Content-Type: application/json
+      simulate.ingest:
+        index: foo-1
+        body: >
+          {
+            "docs": [
+              {
+                "_id": "asdf",
+                "_source": {
+                  "foo": "FOO"
+                }
+              }
+            ],
+            "component_template_substitutions": {
+              "settings_template": {
+                "template": {
+                  "settings": {
+                    "index": {
+                      "default_pipeline": null
+                    }
+                  }
+                }
+              }
+            },
+            "index_template_substitutions": {
+              "foo_index_template": {
+                "index_patterns":[
+                  "foo*"
+                ],
+                "composed_of": ["settings_template"]
+              }
+            }
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "foo-1" }
+  - match: { docs.0.doc._source.foo: "FOO" }
+  - match: { docs.0.doc.executed_pipelines: [] }
+  - not_exists: docs.0.doc.error
+
+  - do:
+      indices.create:
+        index: foo-1
+  - match: { acknowledged: true }
+
+  - do:
+      headers:
+        Content-Type: application/json
+      simulate.ingest:
+        index: foo-1
+        body: >
+          {
+            "docs": [
+              {
+                "_id": "asdf",
+                "_source": {
+                  "foo": "FOO"
+                }
+              }
+            ],
+            "component_template_substitutions": {
+              "settings_template": {
+                "template": {
+                  "settings": {
+                    "index": {
+                      "default_pipeline": null
+                    }
+                  }
+                }
+              }
+            },
+            "index_template_substitutions": {
+              "foo_index_template": {
+                "index_patterns":[
+                  "foo*"
+                ],
+                "composed_of": ["settings_template", "mappings_template"]
+              }
+            }
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "foo-1" }
+  - match: { docs.0.doc._source.foo: "FOO" }
+  - match: { docs.0.doc.executed_pipelines: [] }
+  - not_exists: docs.0.doc.error
+
+  - do:
+      headers:
+        Content-Type: application/json
+      simulate.ingest:
+        index: foo-1
+        body: >
+          {
+            "docs": [
+              {
+                "_id": "asdf",
+                "_source": {
+                  "foo": "FOO"
+                }
+              }
+            ],
+            "component_template_substitutions": {
+              "mappings_template": {
+                "template": {
+                  "mappings": {
+                    "dynamic": "strict",
+                    "properties": {
+                      "foo": {
+                        "type": "boolean"
+                      }
+                    }
+                  }
+                }
+              }
+            },
+            "index_template_substitutions": {
+              "foo_index_template": {
+                "index_patterns":[
+                  "foo*"
+                ],
+                "composed_of": ["settings_template", "mappings_template"]
+              }
+            }
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "foo-1" }
+  - match: { docs.0.doc._source.foo: true }
+  - match: { docs.0.doc.executed_pipelines: ["foo-pipeline"] }
+  - not_exists: docs.0.doc.error
+
+---
+"Test ingest simulate with index template substitutions for data streams":
+  # In this test, we make sure that when the index template is a data stream template, simulate ingest works the same whether the data
+  # stream has been created or not -- either way, we expect it to use the template rather than the data stream / index mappings and settings.
+
+  - skip:
+      features:
+        - headers
+        - allowed_warnings
+
+  - requires:
+      cluster_features: ["simulate.index.template.substitutions"]
+      reason: "ingest simulate component template substitutions added in 8.16"
+
+  - do:
+      headers:
+        Content-Type: application/json
+      ingest.put_pipeline:
+        id: "foo-pipeline"
+        body:  >
+          {
+            "processors": [
+              {
+                "set": {
+                  "field": "foo",
+                  "value": true
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  - do:
+      cluster.put_component_template:
+        name: mappings_template
+        body:
+          template:
+            mappings:
+              dynamic: strict
+              properties:
+                foo:
+                  type: boolean
+
+  - do:
+      cluster.put_component_template:
+        name: settings_template
+        body:
+          template:
+            settings:
+              index:
+                default_pipeline: "foo-pipeline"
+
+  - do:
+      allowed_warnings:
+        - "index template [test-composable-1] has index patterns [foo*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [test-composable-1] will take precedence during new index creation"
+      indices.put_index_template:
+        name: test-composable-1
+        body:
+          index_patterns:
+            - foo*
+          composed_of:
+            - mappings_template
+            - settings_template
+
+  - do:
+      allowed_warnings:
+        - "index template [my-template-1] has index patterns [simple-data-stream1] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template-1] will take precedence during new index creation"
+      indices.put_index_template:
+        name: my-template-1
+        body:
+          index_patterns: [simple-data-stream1]
+          composed_of:
+            - mappings_template
+            - settings_template
+          data_stream: {}
+
+  # Here we replace my-template-1 with a substitute version that uses the settings_template_2 and mappings_template_2 templates defined in
+  # this request, as well as the foo-pipeline-2 pipeline substitution defined in this request.
+  - do:
+      headers:
+        Content-Type: application/json
+      simulate.ingest:
+        index: simple-data-stream1
+        body: >
+          {
+            "docs": [
+              {
+                "_id": "asdf",
+                "_source": {
+                  "@timestamp": 1234,
+                  "foo": false
+                }
+              }
+            ],
+            "pipeline_substitutions": {
+              "foo-pipeline-2": {
+                "processors": [
+                  {
+                    "set": {
+                      "field": "foo",
+                      "value": "FOO"
+                    }
+                  }
+                ]
+              }
+            },
+            "component_template_substitutions": {
+              "settings_template_2": {
+                "template": {
+                  "settings": {
+                    "index": {
+                      "default_pipeline": "foo-pipeline-2"
+                    }
+                  }
+                }
+              },
+              "mappings_template_2": {
+                "template": {
+                  "mappings": {
+                    "dynamic": "strict",
+                    "properties": {
+                      "foo": {
+                        "type": "keyword"
+                      }
+                    }
+                  }
+                }
+              }
+            },
+            "index_template_substitutions": {
+              "my-template-1": {
+                "index_patterns": ["simple-data-stream1"],
+                "composed_of": ["settings_template_2", "mappings_template_2"],
+                "data_stream": {}
+              }
+            }
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "simple-data-stream1" }
+  - match: { docs.0.doc._source.foo: "FOO" }
+  - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] }
+  - not_exists: docs.0.doc.error
+
+  - do:
+      indices.create_data_stream:
+        name: simple-data-stream1
+  - is_true: acknowledged
+
+  - do:
+      cluster.health:
+        wait_for_status: yellow
+
+  - do:
+      headers:
+        Content-Type: application/json
+      simulate.ingest:
+        index: simple-data-stream1
+        body: >
+          {
+            "docs": [
+              {
+                "_id": "asdf",
+                "_source": {
+                  "@timestamp": 1234,
+                  "foo": false
+                }
+              }
+            ],
+            "pipeline_substitutions": {
+              "foo-pipeline-2": {
+                "processors": [
+                  {
+                    "set": {
+                      "field": "foo",
+                      "value": "FOO"
+                    }
+                  }
+                ]
+              }
+            },
+            "component_template_substitutions": {
+              "settings_template_2": {
+                "template": {
+                  "settings": {
+                    "index": {
+                      "default_pipeline": "foo-pipeline-2"
+                    }
+                  }
+                }
+              },
+              "mappings_template_2": {
+                "template": {
+                  "mappings": {
+                    "dynamic": "strict",
+                    "properties": {
+                      "foo": {
+                        "type": "keyword"
+                      }
+                    }
+                  }
+                }
+              }
+            },
+            "index_template_substitutions": {
+              "my-template-1": {
+                "index_patterns": ["simple-data-stream1"],
+                "composed_of": ["settings_template_2", "mappings_template_2"],
+                "data_stream": {}
+              }
+            }
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._index: "simple-data-stream1" }
+  - match: { docs.0.doc._source.foo: "FOO" }
+  - match: { docs.0.doc.executed_pipelines: ["foo-pipeline-2"] }
+  - not_exists: docs.0.doc.error
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java
index 91674b7ce905..af99a0344e03 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java
@@ -59,7 +59,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
             }
             """;
         indicesAdmin().create(new CreateIndexRequest(indexName).mapping(mapping)).actionGet();
-        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
         bulkRequest.add(new IndexRequest(indexName).source("""
             {
               "foo1": "baz"
@@ -90,7 +90,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
     }
 
     @SuppressWarnings("unchecked")
-    public void testMappingValidationIndexExistsWithComponentTemplate() throws IOException {
+    public void testMappingValidationIndexExistsTemplateSubstitutions() throws IOException {
         /*
          * This test simulates a BulkRequest of two documents into an existing index. Then we make sure the index contains no documents, and
          * that the index's mapping in the cluster state has not been updated with the two new field. With the mapping from the template
@@ -122,16 +122,19 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
             .indexPatterns(List.of("my-index-*"))
             .componentTemplates(List.of("test-component-template"))
             .build();
-        TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request("test");
+        final String indexTemplateName = "test-index-template";
+        TransportPutComposableIndexTemplateAction.Request request = new TransportPutComposableIndexTemplateAction.Request(
+            indexTemplateName
+        );
         request.indexTemplate(composableIndexTemplate);
         client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet();
 
         String indexName = "my-index-1";
         // First, run before the index is created:
-        assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName);
+        assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName);
         // Now, create the index and make sure the component template substitutions work the same:
         indicesAdmin().create(new CreateIndexRequest(indexName)).actionGet();
-        assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName);
+        assertMappingsUpdatedFromComponentTemplateSubstitutions(indexName, indexTemplateName);
         // Now make sure nothing was actually changed:
         indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet();
         SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet();
@@ -143,7 +146,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         assertThat(fields.size(), equalTo(1));
     }
 
-    private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String indexName) {
+    private void assertMappingsUpdatedFromComponentTemplateSubstitutions(String indexName, String indexTemplateName) {
         IndexRequest indexRequest1 = new IndexRequest(indexName).source("""
             {
               "foo1": "baz"
@@ -156,7 +159,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
             """, XContentType.JSON).id(randomUUID());
         {
             // First we use the original component template, and expect a failure in the second document:
-            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
             bulkRequest.add(indexRequest1);
             bulkRequest.add(indexRequest2);
             BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet();
@@ -188,7 +191,42 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
                             )
                         )
                     )
-                )
+                ),
+                Map.of()
+            );
+            bulkRequest.add(indexRequest1);
+            bulkRequest.add(indexRequest2);
+            BulkResponse response = client().execute(new ActionType(SimulateBulkAction.NAME), bulkRequest).actionGet();
+            assertThat(response.getItems().length, equalTo(2));
+            assertThat(response.getItems()[0].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
+            assertNull(((SimulateIndexResponse) response.getItems()[0].getResponse()).getException());
+            assertThat(response.getItems()[1].getResponse().getResult(), equalTo(DocWriteResponse.Result.CREATED));
+            assertNull(((SimulateIndexResponse) response.getItems()[1].getResponse()).getException());
+        }
+
+        {
+            /*
+             * Now we substitute a "test-component-template-2" that defines both fields, and an index template that pulls it in, so we
+             * expect no exception:
+             */
+            BulkRequest bulkRequest = new SimulateBulkRequest(
+                Map.of(),
+                Map.of(
+                    "test-component-template-2",
+                    Map.of(
+                        "template",
+                        Map.of(
+                            "mappings",
+                            Map.of(
+                                "dynamic",
+                                "strict",
+                                "properties",
+                                Map.of("foo1", Map.of("type", "text"), "foo3", Map.of("type", "text"))
+                            )
+                        )
+                    )
+                ),
+                Map.of(indexTemplateName, Map.of("index_patterns", List.of(indexName), "composed_of", List.of("test-component-template-2")))
             );
             bulkRequest.add(indexRequest1);
             bulkRequest.add(indexRequest2);
@@ -207,7 +245,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
          * mapping-less "random-index-template" created by the parent class), so we expect no mapping validation failure.
          */
         String indexName = randomAlphaOfLength(20).toLowerCase(Locale.ROOT);
-        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
         bulkRequest.add(new IndexRequest(indexName).source("""
             {
               "foo1": "baz"
@@ -254,7 +292,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         request.indexTemplate(composableIndexTemplate);
 
         client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet();
-        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
         bulkRequest.add(new IndexRequest(indexName).source("""
             {
               "foo1": "baz"
@@ -286,7 +324,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         indicesAdmin().putTemplate(
             new PutIndexTemplateRequest("test-template").patterns(List.of("my-index-*")).mapping("foo1", "type=integer")
         ).actionGet();
-        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+        BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
         bulkRequest.add(new IndexRequest(indexName).source("""
             {
               "foo1": "baz"
@@ -340,7 +378,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         client().execute(TransportPutComposableIndexTemplateAction.TYPE, request).actionGet();
         {
             // First, try with no @timestamp to make sure we're picking up data-stream-specific templates
-            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
             bulkRequest.add(new IndexRequest(indexName).source("""
                 {
                   "foo1": "baz"
@@ -366,7 +404,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         }
         {
             // Now with @timestamp
-            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+            BulkRequest bulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
             bulkRequest.add(new IndexRequest(indexName).source("""
                 {
                   "@timestamp": "2024-08-27",
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index 2095ba47ee37..78fddad603ca 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -237,6 +237,7 @@ public class TransportVersions {
     public static final TransportVersion DATE_TIME_DOC_VALUES_LOCALES = def(8_761_00_0);
     public static final TransportVersion FAST_REFRESH_RCO = def(8_762_00_0);
     public static final TransportVersion TEXT_SIMILARITY_RERANKER_QUERY_REWRITE = def(8_763_00_0);
+    public static final TransportVersion SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS = def(8_764_00_0);
 
     /*
      * STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java
index af1782ac1ade..78e603fba9be 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkFeatures.java
@@ -15,11 +15,17 @@ import org.elasticsearch.features.NodeFeature;
 import java.util.Set;
 
 import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS;
+import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS;
 import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION;
 import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION_TEMPLATES;
 
 public class BulkFeatures implements FeatureSpecification {
     public Set getFeatures() {
-        return Set.of(SIMULATE_MAPPING_VALIDATION, SIMULATE_MAPPING_VALIDATION_TEMPLATES, SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS);
+        return Set.of(
+            SIMULATE_MAPPING_VALIDATION,
+            SIMULATE_MAPPING_VALIDATION_TEMPLATES,
+            SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS,
+            SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS
+        );
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
index 558901f10229..f62b2f48fa2f 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java
@@ -23,6 +23,7 @@ import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.support.replication.ReplicationRequest;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -506,6 +507,10 @@ public class BulkRequest extends ActionRequest
         return Map.of();
     }
 
+    public Map getIndexTemplateSubstitutions() throws IOException {
+        return Map.of();
+    }
+
     record IncrementalState(Map shardLevelFailures, boolean indexingPressureAccounted) implements Writeable {
 
         static final IncrementalState EMPTY = new IncrementalState(Collections.emptyMap(), false);
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java
index 3cc7fa12733b..6fa22151396d 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/SimulateBulkRequest.java
@@ -11,6 +11,7 @@ package org.elasticsearch.action.bulk;
 
 import org.elasticsearch.TransportVersions;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -22,8 +23,8 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * This extends BulkRequest with support for providing substitute pipeline definitions and component template definitions. In a user
- * request, the substitutions will look something like this:
+ * This extends BulkRequest with support for providing substitute pipeline definitions, component template definitions, and index template
+ * definitions. In a user request, the substitutions will look something like this:
  *
  *   "pipeline_substitutions": {
  *     "my-pipeline-1": {
@@ -72,6 +73,16 @@ import java.util.Map;
  *           }
  *         }
  *       }
+ *     },
+ *   "index_template_substitutions": {
+ *     "my-index-template-1": {
+ *       "template": {
+ *         "index_patterns": ["foo*", "bar*"],
+ *         "composed_of": [
+ *           "component-template-1",
+ *           "component-template-2"
+ *         ]
+ *       }
  *     }
  *   }
  *
@@ -82,6 +93,7 @@ import java.util.Map;
 public class SimulateBulkRequest extends BulkRequest {
     private final Map> pipelineSubstitutions;
     private final Map> componentTemplateSubstitutions;
+    private final Map> indexTemplateSubstitutions;
 
     /**
      * @param pipelineSubstitutions The pipeline definitions that are to be used in place of any pre-existing pipeline definitions with
@@ -89,14 +101,18 @@ public class SimulateBulkRequest extends BulkRequest {
      *                              parsed by XContentHelper.convertToMap().
      * @param componentTemplateSubstitutions The component template definitions that are to be used in place of any pre-existing
      *                                       component template definitions with the same name.
+     * @param indexTemplateSubstitutions The index template definitions that are to be used in place of any pre-existing
+     *                                   index template definitions with the same name.
      */
     public SimulateBulkRequest(
         @Nullable Map> pipelineSubstitutions,
-        @Nullable Map> componentTemplateSubstitutions
+        @Nullable Map> componentTemplateSubstitutions,
+        @Nullable Map> indexTemplateSubstitutions
     ) {
         super();
         this.pipelineSubstitutions = pipelineSubstitutions;
         this.componentTemplateSubstitutions = componentTemplateSubstitutions;
+        this.indexTemplateSubstitutions = indexTemplateSubstitutions;
     }
 
     @SuppressWarnings("unchecked")
@@ -108,6 +124,11 @@ public class SimulateBulkRequest extends BulkRequest {
         } else {
             componentTemplateSubstitutions = Map.of();
         }
+        if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) {
+            this.indexTemplateSubstitutions = (Map>) in.readGenericValue();
+        } else {
+            indexTemplateSubstitutions = Map.of();
+        }
     }
 
     @Override
@@ -117,6 +138,9 @@ public class SimulateBulkRequest extends BulkRequest {
         if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_COMPONENT_TEMPLATES_SUBSTITUTIONS)) {
             out.writeGenericValue(componentTemplateSubstitutions);
         }
+        if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_INDEX_TEMPLATES_SUBSTITUTIONS)) {
+            out.writeGenericValue(indexTemplateSubstitutions);
+        }
     }
 
     public Map> getPipelineSubstitutions() {
@@ -140,6 +164,18 @@ public class SimulateBulkRequest extends BulkRequest {
         return result;
     }
 
+    @Override
+    public Map getIndexTemplateSubstitutions() throws IOException {
+        if (indexTemplateSubstitutions == null) {
+            return Map.of();
+        }
+        Map result = new HashMap<>(indexTemplateSubstitutions.size());
+        for (Map.Entry> rawEntry : indexTemplateSubstitutions.entrySet()) {
+            result.put(rawEntry.getKey(), convertRawTemplateToIndexTemplate(rawEntry.getValue()));
+        }
+        return result;
+    }
+
     private static ComponentTemplate convertRawTemplateToComponentTemplate(Map rawTemplate) throws IOException {
         ComponentTemplate componentTemplate;
         try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) {
@@ -148,9 +184,21 @@ public class SimulateBulkRequest extends BulkRequest {
         return componentTemplate;
     }
 
+    private static ComposableIndexTemplate convertRawTemplateToIndexTemplate(Map rawTemplate) throws IOException {
+        ComposableIndexTemplate indexTemplate;
+        try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, rawTemplate)) {
+            indexTemplate = ComposableIndexTemplate.parse(parser);
+        }
+        return indexTemplate;
+    }
+
     @Override
     public BulkRequest shallowClone() {
-        BulkRequest bulkRequest = new SimulateBulkRequest(pipelineSubstitutions, componentTemplateSubstitutions);
+        BulkRequest bulkRequest = new SimulateBulkRequest(
+            pipelineSubstitutions,
+            componentTemplateSubstitutions,
+            indexTemplateSubstitutions
+        );
         bulkRequest.setRefreshPolicy(getRefreshPolicy());
         bulkRequest.waitForActiveShards(waitForActiveShards());
         bulkRequest.timeout(timeout());
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java
index 8c6565e52daa..111e4d72c57c 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportAbstractBulkAction.java
@@ -25,6 +25,7 @@ import org.elasticsearch.cluster.ClusterStateObserver;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
@@ -183,7 +184,9 @@ public abstract class TransportAbstractBulkAction extends HandledTransportAction
         boolean hasIndexRequestsWithPipelines = false;
         final Metadata metadata;
         Map componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions();
-        if (bulkRequest.isSimulated() && componentTemplateSubstitutions.isEmpty() == false) {
+        Map indexTemplateSubstitutions = bulkRequest.getIndexTemplateSubstitutions();
+        if (bulkRequest.isSimulated()
+            && (componentTemplateSubstitutions.isEmpty() == false || indexTemplateSubstitutions.isEmpty() == false)) {
             /*
              * If this is a simulated request, and there are template substitutions, then we want to create and use a new metadata that has
              * those templates. That is, we want to add the new templates (which will replace any that already existed with the same name),
@@ -197,6 +200,12 @@ public abstract class TransportAbstractBulkAction extends HandledTransportAction
                 updatedComponentTemplates.putAll(componentTemplateSubstitutions);
                 simulatedMetadataBuilder.componentTemplates(updatedComponentTemplates);
             }
+            if (indexTemplateSubstitutions.isEmpty() == false) {
+                Map updatedIndexTemplates = new HashMap<>();
+                updatedIndexTemplates.putAll(clusterService.state().metadata().templatesV2());
+                updatedIndexTemplates.putAll(indexTemplateSubstitutions);
+                simulatedMetadataBuilder.indexTemplates(updatedIndexTemplates);
+            }
             /*
              * We now remove the index from the simulated metadata to force the templates to be used. Note that simulated requests are
              * always index requests -- no other type of request is supported.
diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
index 713116c4cf98..d7c555879c00 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java
@@ -17,6 +17,7 @@ import org.elasticsearch.action.ingest.SimulateIndexResponse;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.cluster.metadata.IndexAbstraction;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
@@ -73,6 +74,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
     public static final NodeFeature SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS = new NodeFeature(
         "simulate.component.template.substitutions"
     );
+    public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions");
     private final IndicesService indicesService;
     private final NamedXContentRegistry xContentRegistry;
     private final Set indexSettingProviders;
@@ -119,11 +121,12 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
             : "TransportSimulateBulkAction should only ever be called with a SimulateBulkRequest but got a " + bulkRequest.getClass();
         final AtomicArray responses = new AtomicArray<>(bulkRequest.requests.size());
         Map componentTemplateSubstitutions = bulkRequest.getComponentTemplateSubstitutions();
+        Map indexTemplateSubstitutions = bulkRequest.getIndexTemplateSubstitutions();
         for (int i = 0; i < bulkRequest.requests.size(); i++) {
             DocWriteRequest docRequest = bulkRequest.requests.get(i);
             assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests";
             IndexRequest request = (IndexRequest) docRequest;
-            Exception mappingValidationException = validateMappings(componentTemplateSubstitutions, request);
+            Exception mappingValidationException = validateMappings(componentTemplateSubstitutions, indexTemplateSubstitutions, request);
             responses.set(
                 i,
                 BulkItemResponse.success(
@@ -153,7 +156,11 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
      * @param request The IndexRequest whose source will be validated against the mapping (if it exists) of its index
      * @return a mapping exception if the source does not match the mappings, otherwise null
      */
-    private Exception validateMappings(Map componentTemplateSubstitutions, IndexRequest request) {
+    private Exception validateMappings(
+        Map componentTemplateSubstitutions,
+        Map indexTemplateSubstitutions,
+        IndexRequest request
+    ) {
         final SourceToParse sourceToParse = new SourceToParse(
             request.id(),
             request.source(),
@@ -167,7 +174,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
         Exception mappingValidationException = null;
         IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(request.index());
         try {
-            if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty()) {
+            if (indexAbstraction != null && componentTemplateSubstitutions.isEmpty() && indexTemplateSubstitutions.isEmpty()) {
                 /*
                  * In this case the index exists and we don't have any component template overrides. So we can just use withTempIndexService
                  * to do the mapping validation, using all the existing logic for validation.
@@ -222,6 +229,12 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
                     updatedComponentTemplates.putAll(componentTemplateSubstitutions);
                     simulatedMetadata.componentTemplates(updatedComponentTemplates);
                 }
+                if (indexTemplateSubstitutions.isEmpty() == false) {
+                    Map updatedIndexTemplates = new HashMap<>();
+                    updatedIndexTemplates.putAll(state.metadata().templatesV2());
+                    updatedIndexTemplates.putAll(indexTemplateSubstitutions);
+                    simulatedMetadata.indexTemplates(updatedIndexTemplates);
+                }
                 ClusterState simulatedState = simulatedClusterStateBuilder.metadata(simulatedMetadata).build();
 
                 String matchingTemplate = findV2Template(simulatedState.metadata(), request.index(), false);
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
index abeb3279b7b5..2a2cf6743a87 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexTemplateService.java
@@ -1421,44 +1421,24 @@ public class MetadataIndexTemplateService {
      * Resolve the given v2 template into a collected {@link Settings} object
      */
     public static Settings resolveSettings(final Metadata metadata, final String templateName) {
-        return resolveSettings(metadata, templateName, Map.of());
-    }
-
-    public static Settings resolveSettings(
-        final Metadata metadata,
-        final String templateName,
-        Map templateSubstitutions
-    ) {
         final ComposableIndexTemplate template = metadata.templatesV2().get(templateName);
         assert template != null
             : "attempted to resolve settings for a template [" + templateName + "] that did not exist in the cluster state";
         if (template == null) {
             return Settings.EMPTY;
         }
-        return resolveSettings(template, metadata.componentTemplates(), templateSubstitutions);
+        return resolveSettings(template, metadata.componentTemplates());
     }
 
     /**
      * Resolve the provided v2 template and component templates into a collected {@link Settings} object
      */
     public static Settings resolveSettings(ComposableIndexTemplate template, Map componentTemplates) {
-        return resolveSettings(template, componentTemplates, Map.of());
-    }
-
-    public static Settings resolveSettings(
-        ComposableIndexTemplate template,
-        Map componentTemplates,
-        Map templateSubstitutions
-    ) {
         Objects.requireNonNull(template, "attempted to resolve settings for a null template");
         Objects.requireNonNull(componentTemplates, "attempted to resolve settings with null component templates");
-        Map combinedComponentTemplates = new HashMap<>();
-        combinedComponentTemplates.putAll(componentTemplates);
-        // We want any substitutions to take precedence:
-        combinedComponentTemplates.putAll(templateSubstitutions);
         List componentSettings = template.composedOf()
             .stream()
-            .map(combinedComponentTemplates::get)
+            .map(componentTemplates::get)
             .filter(Objects::nonNull)
             .map(ComponentTemplate::template)
             .map(Template::settings)
diff --git a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
index 6de15b0046f1..680860332fe7 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/ingest/RestSimulateIngestAction.java
@@ -76,7 +76,8 @@ public class RestSimulateIngestAction extends BaseRestHandler {
         Map sourceMap = XContentHelper.convertToMap(sourceTuple.v2(), false, sourceTuple.v1()).v2();
         SimulateBulkRequest bulkRequest = new SimulateBulkRequest(
             (Map>) sourceMap.remove("pipeline_substitutions"),
-            (Map>) sourceMap.remove("component_template_substitutions")
+            (Map>) sourceMap.remove("component_template_substitutions"),
+            (Map>) sourceMap.remove("index_template_substitutions")
         );
         BytesReference transformedData = convertToBulkRequestXContentBytes(sourceMap);
         bulkRequest.add(
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java
index b6b1770e2ed5..c94e4e46c9ee 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/SimulateBulkRequestTests.java
@@ -11,6 +11,7 @@ package org.elasticsearch.action.bulk;
 
 import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.cluster.metadata.ComponentTemplate;
+import org.elasticsearch.cluster.metadata.ComposableIndexTemplate;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.test.ESTestCase;
@@ -27,18 +28,27 @@ import static org.hamcrest.Matchers.instanceOf;
 public class SimulateBulkRequestTests extends ESTestCase {
 
     public void testSerialization() throws Exception {
-        testSerialization(getTestPipelineSubstitutions(), getTestTemplateSubstitutions());
-        testSerialization(getTestPipelineSubstitutions(), null);
-        testSerialization(null, getTestTemplateSubstitutions());
-        testSerialization(null, null);
-        testSerialization(Map.of(), Map.of());
+        testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions());
+        testSerialization(getTestPipelineSubstitutions(), null, null);
+        testSerialization(getTestPipelineSubstitutions(), getTestComponentTemplateSubstitutions(), null);
+        testSerialization(getTestPipelineSubstitutions(), null, getTestIndexTemplateSubstitutions());
+        testSerialization(null, getTestComponentTemplateSubstitutions(), getTestIndexTemplateSubstitutions());
+        testSerialization(null, getTestComponentTemplateSubstitutions(), null);
+        testSerialization(null, null, getTestIndexTemplateSubstitutions());
+        testSerialization(null, null, null);
+        testSerialization(Map.of(), Map.of(), Map.of());
     }
 
     private void testSerialization(
         Map> pipelineSubstitutions,
-        Map> templateSubstitutions
+        Map> componentTemplateSubstitutions,
+        Map> indexTemplateSubstitutions
     ) throws IOException {
-        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, templateSubstitutions);
+        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(
+            pipelineSubstitutions,
+            componentTemplateSubstitutions,
+            indexTemplateSubstitutions
+        );
         /*
          * Note: SimulateBulkRequest does not implement equals or hashCode, so we can't test serialization in the usual way for a
          * Writable
@@ -49,7 +59,7 @@ public class SimulateBulkRequestTests extends ESTestCase {
 
     @SuppressWarnings({ "unchecked", "rawtypes" })
     public void testGetComponentTemplateSubstitutions() throws IOException {
-        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of());
+        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
         assertThat(simulateBulkRequest.getComponentTemplateSubstitutions(), equalTo(Map.of()));
         String substituteComponentTemplatesString = """
               {
@@ -83,7 +93,7 @@ public class SimulateBulkRequestTests extends ESTestCase {
             XContentType.JSON
         ).v2();
         Map> substituteComponentTemplates = (Map>) tempMap;
-        simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates);
+        simulateBulkRequest = new SimulateBulkRequest(Map.of(), substituteComponentTemplates, Map.of());
         Map componentTemplateSubstitutions = simulateBulkRequest.getComponentTemplateSubstitutions();
         assertThat(componentTemplateSubstitutions.size(), equalTo(2));
         assertThat(
@@ -107,8 +117,70 @@ public class SimulateBulkRequestTests extends ESTestCase {
         );
     }
 
+    public void testGetIndexTemplateSubstitutions() throws IOException {
+        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), Map.of());
+        assertThat(simulateBulkRequest.getIndexTemplateSubstitutions(), equalTo(Map.of()));
+        String substituteIndexTemplatesString = """
+              {
+                  "foo_template": {
+                    "index_patterns": ["foo*"],
+                    "composed_of": ["foo_mapping_template", "foo_settings_template"],
+                    "template": {
+                      "mappings": {
+                        "dynamic": "true",
+                        "properties": {
+                          "foo": {
+                            "type": "keyword"
+                          }
+                        }
+                      },
+                      "settings": {
+                        "index": {
+                          "default_pipeline": "foo-pipeline"
+                        }
+                      }
+                    }
+                  },
+                  "bar_template": {
+                    "index_patterns": ["bar*"],
+                    "composed_of": ["bar_mapping_template", "bar_settings_template"]
+                  }
+              }
+            """;
+
+        @SuppressWarnings("unchecked")
+        Map> substituteIndexTemplates = (Map>) (Map) XContentHelper.convertToMap(
+            new BytesArray(substituteIndexTemplatesString.getBytes(StandardCharsets.UTF_8)),
+            randomBoolean(),
+            XContentType.JSON
+        ).v2();
+        simulateBulkRequest = new SimulateBulkRequest(Map.of(), Map.of(), substituteIndexTemplates);
+        Map indexTemplateSubstitutions = simulateBulkRequest.getIndexTemplateSubstitutions();
+        assertThat(indexTemplateSubstitutions.size(), equalTo(2));
+        assertThat(
+            XContentHelper.convertToMap(
+                XContentHelper.toXContent(indexTemplateSubstitutions.get("foo_template").template(), XContentType.JSON, randomBoolean()),
+                randomBoolean(),
+                XContentType.JSON
+            ).v2(),
+            equalTo(substituteIndexTemplates.get("foo_template").get("template"))
+        );
+
+        assertThat(indexTemplateSubstitutions.get("foo_template").template().settings().size(), equalTo(1));
+        assertThat(
+            indexTemplateSubstitutions.get("foo_template").template().settings().get("index.default_pipeline"),
+            equalTo("foo-pipeline")
+        );
+        assertNull(indexTemplateSubstitutions.get("bar_template").template());
+        assertNull(indexTemplateSubstitutions.get("bar_template").template());
+    }
+
     public void testShallowClone() throws IOException {
-        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(getTestPipelineSubstitutions(), getTestTemplateSubstitutions());
+        SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(
+            getTestPipelineSubstitutions(),
+            getTestComponentTemplateSubstitutions(),
+            getTestIndexTemplateSubstitutions()
+        );
         simulateBulkRequest.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()));
         simulateBulkRequest.waitForActiveShards(randomIntBetween(1, 10));
         simulateBulkRequest.timeout(randomTimeValue());
@@ -144,7 +216,7 @@ public class SimulateBulkRequestTests extends ESTestCase {
         );
     }
 
-    private static Map> getTestTemplateSubstitutions() {
+    private static Map> getTestComponentTemplateSubstitutions() {
         return Map.of(
             "template1",
             Map.of(
@@ -155,4 +227,25 @@ public class SimulateBulkRequestTests extends ESTestCase {
             Map.of("template", Map.of("mappings", Map.of(), "settings", Map.of()))
         );
     }
+
+    private static Map> getTestIndexTemplateSubstitutions() {
+        return Map.of(
+            "template1",
+            Map.of(
+                "template",
+                Map.of(
+                    "index_patterns",
+                    List.of("foo*", "bar*"),
+                    "composed_of",
+                    List.of("template_1", "template_2"),
+                    "mappings",
+                    Map.of("_source", Map.of("enabled", false), "properties", Map.of()),
+                    "settings",
+                    Map.of()
+                )
+            ),
+            "template2",
+            Map.of("template", Map.of("index_patterns", List.of("foo*", "bar*"), "mappings", Map.of(), "settings", Map.of()))
+        );
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
index f4e53912d09a..71bc31334920 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java
@@ -135,7 +135,7 @@ public class TransportSimulateBulkActionTests extends ESTestCase {
 
     public void testIndexData() throws IOException {
         Task task = mock(Task.class); // unused
-        BulkRequest bulkRequest = new SimulateBulkRequest(null, null);
+        BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null);
         int bulkItemCount = randomIntBetween(0, 200);
         for (int i = 0; i < bulkItemCount; i++) {
             Map source = Map.of(randomAlphaOfLength(10), randomAlphaOfLength(5));
@@ -218,7 +218,7 @@ public class TransportSimulateBulkActionTests extends ESTestCase {
          * (7) An indexing request to a nonexistent index that matches no templates
          */
         Task task = mock(Task.class); // unused
-        BulkRequest bulkRequest = new SimulateBulkRequest(null, null);
+        BulkRequest bulkRequest = new SimulateBulkRequest(null, null, null);
         int bulkItemCount = randomIntBetween(0, 200);
         Map indicesMap = new HashMap<>();
         Map v1Templates = new HashMap<>();
diff --git a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
index 332a04e40e43..3b3f5bdc747b 100644
--- a/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/ingest/SimulateIngestServiceTests.java
@@ -65,7 +65,7 @@ public class SimulateIngestServiceTests extends ESTestCase {
         ingestService.innerUpdatePipelines(ingestMetadata);
         {
             // First we make sure that if there are no substitutions that we get our original pipeline back:
-            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(null, null);
+            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(null, null, null);
             SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest);
             Pipeline pipeline = simulateIngestService.getPipeline("pipeline1");
             assertThat(pipeline.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1"))));
@@ -83,7 +83,7 @@ public class SimulateIngestServiceTests extends ESTestCase {
             );
             pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap()))));
 
-            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null);
+            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null);
             SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest);
             Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1");
             assertThat(
@@ -103,7 +103,7 @@ public class SimulateIngestServiceTests extends ESTestCase {
              */
             Map> pipelineSubstitutions = new HashMap<>();
             pipelineSubstitutions.put("pipeline2", newHashMap("processors", List.of(newHashMap("processor3", Collections.emptyMap()))));
-            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null);
+            SimulateBulkRequest simulateBulkRequest = new SimulateBulkRequest(pipelineSubstitutions, null, null);
             SimulateIngestService simulateIngestService = new SimulateIngestService(ingestService, simulateBulkRequest);
             Pipeline pipeline1 = simulateIngestService.getPipeline("pipeline1");
             assertThat(pipeline1.getProcessors(), contains(transformedMatch(Processor::getType, equalTo("processor1"))));

From e676f6bb588c84291a5ee59c2d7147dceabc1642 Mon Sep 17 00:00:00 2001
From: Panagiotis Bailis 
Date: Wed, 9 Oct 2024 08:48:13 +0300
Subject: [PATCH 72/85] Updating RRF license to Enterprise (#113100)

---
 .../java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java
index ece08d1a3d55..9404d863f1d2 100644
--- a/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java
+++ b/x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRankPlugin.java
@@ -25,7 +25,7 @@ public class RRFRankPlugin extends Plugin implements SearchPlugin {
     public static final LicensedFeature.Momentary RANK_RRF_FEATURE = LicensedFeature.momentary(
         null,
         "rank-rrf",
-        License.OperationMode.PLATINUM
+        License.OperationMode.ENTERPRISE
     );
 
     public static final String NAME = "rrf";

From 276f3b8836c4b82bdc174d68e9fabcf83abc4f0a Mon Sep 17 00:00:00 2001
From: David Turner 
Date: Wed, 9 Oct 2024 06:59:38 +0100
Subject: [PATCH 73/85] Avoid leaking blackholed register ops in tests
 (#114287)

Today when we reboot a node in a test case derived from
`AbstractCoordinatorTestCase` we lose the contents of
`blackholedRegisterOperations`, but it's important that these operations
_eventually_ run. With this commit we copy these operations over into
the new node.
---
 .../cluster/coordination/AbstractCoordinatorTestCase.java     | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
index 262dba80caa2..ddfa61b53a0a 100644
--- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java
@@ -1250,7 +1250,7 @@ public class AbstractCoordinatorTestCase extends ESTestCase {
                     .roles(localNode.isMasterNode() && DiscoveryNode.isMasterNode(settings) ? ALL_ROLES_EXCEPT_VOTING_ONLY : emptySet())
                     .build();
                 try {
-                    return new ClusterNode(
+                    final var restartedNode = new ClusterNode(
                         nodeIndex,
                         newLocalNode,
                         (node, threadPool) -> createPersistedStateFromExistingState(
@@ -1263,6 +1263,8 @@ public class AbstractCoordinatorTestCase extends ESTestCase {
                         settings,
                         nodeHealthService
                     );
+                    restartedNode.blackholedRegisterOperations.addAll(blackholedRegisterOperations);
+                    return restartedNode;
                 } finally {
                     clearableRecycler.clear();
                 }

From c41f1e7bb5a10611646ec392f117776e6579c1b6 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine
 <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 9 Oct 2024 17:05:11 +1100
Subject: [PATCH 74/85] Mute
 org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT
 testProfileOrdinalsGroupingOperator {SYNC} #114380

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 5ab607fab209..0ee6b21cedca 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -371,6 +371,9 @@ tests:
 - class: org.elasticsearch.xpack.security.CoreWithSecurityClientYamlTestSuiteIT
   method: test {yaml=cluster.stats/30_ccs_stats/cross-cluster search stats search}
   issue: https://github.com/elastic/elasticsearch/issues/114371
+- class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT
+  method: testProfileOrdinalsGroupingOperator {SYNC}
+  issue: https://github.com/elastic/elasticsearch/issues/114380
 
 # Examples:
 #

From f79705d9b6c528a96cb5badaa251377525cbd628 Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Wed, 9 Oct 2024 10:22:39 +0300
Subject: [PATCH 75/85] Skip storing ignored source for single-element leaf
 arrays (#113937)

* Minimize storing array source

* restrict to fields

* revert changes for `addIgnoredFieldFromContext`

* fix test

* spotless

* count nulls
---
 .../index/mapper/DocumentParser.java          | 39 ++++++++++++++-----
 .../index/mapper/DocumentParserContext.java   |  6 +++
 .../mapper/IgnoredSourceFieldMapperTests.java | 32 +++++++++++++++
 .../index/mapper/MapperTestCase.java          |  6 ++-
 4 files changed, 73 insertions(+), 10 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 19bd4f9980ba..0ff754d95393 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -799,17 +799,30 @@ public final class DocumentParser {
         String fullPath = context.path().pathAsText(arrayFieldName);
 
         // Check if we need to record the array source. This only applies to synthetic source.
+        boolean canRemoveSingleLeafElement = false;
         if (context.canAddIgnoredField()) {
-            boolean objectRequiresStoringSource = mapper instanceof ObjectMapper objectMapper
-                && (getSourceKeepMode(context, objectMapper.sourceKeepMode()) == Mapper.SourceKeepMode.ALL
-                    || (getSourceKeepMode(context, objectMapper.sourceKeepMode()) == Mapper.SourceKeepMode.ARRAYS
-                        && objectMapper instanceof NestedObjectMapper == false));
-            boolean fieldWithFallbackSyntheticSource = mapper instanceof FieldMapper fieldMapper
-                && fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK;
-            boolean fieldWithStoredArraySource = mapper instanceof FieldMapper fieldMapper
-                && getSourceKeepMode(context, fieldMapper.sourceKeepMode()) != Mapper.SourceKeepMode.NONE;
+            Mapper.SourceKeepMode mode = Mapper.SourceKeepMode.NONE;
+            boolean objectWithFallbackSyntheticSource = false;
+            if (mapper instanceof ObjectMapper objectMapper) {
+                mode = getSourceKeepMode(context, objectMapper.sourceKeepMode());
+                objectWithFallbackSyntheticSource = (mode == Mapper.SourceKeepMode.ALL
+                    || (mode == Mapper.SourceKeepMode.ARRAYS && objectMapper instanceof NestedObjectMapper == false));
+            }
+            boolean fieldWithFallbackSyntheticSource = false;
+            boolean fieldWithStoredArraySource = false;
+            if (mapper instanceof FieldMapper fieldMapper) {
+                mode = getSourceKeepMode(context, fieldMapper.sourceKeepMode());
+                fieldWithFallbackSyntheticSource = fieldMapper.syntheticSourceMode() == FieldMapper.SyntheticSourceMode.FALLBACK;
+                fieldWithStoredArraySource = mode != Mapper.SourceKeepMode.NONE;
+            }
             boolean copyToFieldHasValuesInDocument = context.isWithinCopyTo() == false && context.isCopyToDestinationField(fullPath);
-            if (objectRequiresStoringSource
+
+            canRemoveSingleLeafElement = mapper instanceof FieldMapper
+                && mode == Mapper.SourceKeepMode.ARRAYS
+                && fieldWithFallbackSyntheticSource == false
+                && copyToFieldHasValuesInDocument == false;
+
+            if (objectWithFallbackSyntheticSource
                 || fieldWithFallbackSyntheticSource
                 || fieldWithStoredArraySource
                 || copyToFieldHasValuesInDocument) {
@@ -829,20 +842,28 @@ public final class DocumentParser {
 
         XContentParser parser = context.parser();
         XContentParser.Token token;
+        int elements = 0;
         while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
             if (token == XContentParser.Token.START_OBJECT) {
+                elements = Integer.MAX_VALUE;
                 parseObject(context, lastFieldName);
             } else if (token == XContentParser.Token.START_ARRAY) {
+                elements = Integer.MAX_VALUE;
                 parseArray(context, lastFieldName);
             } else if (token == XContentParser.Token.VALUE_NULL) {
+                elements++;
                 parseNullValue(context, lastFieldName);
             } else if (token == null) {
                 throwEOFOnParseArray(arrayFieldName, context);
             } else {
                 assert token.isValue();
+                elements++;
                 parseValue(context, lastFieldName);
             }
         }
+        if (elements <= 1 && canRemoveSingleLeafElement) {
+            context.removeLastIgnoredField(fullPath);
+        }
         postProcessDynamicArrayMapping(context, lastFieldName);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
index eebe95e260dc..ac236e5a7e5f 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java
@@ -296,6 +296,12 @@ public abstract class DocumentParserContext {
         }
     }
 
+    final void removeLastIgnoredField(String name) {
+        if (ignoredFieldValues.isEmpty() == false && ignoredFieldValues.getLast().name().equals(name)) {
+            ignoredFieldValues.removeLast();
+        }
+    }
+
     /**
      * Return the collection of values for fields that have been ignored so far.
      */
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java
index 8c65424fb856..205ff08c397b 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapperTests.java
@@ -532,6 +532,38 @@ public class IgnoredSourceFieldMapperTests extends MapperServiceTestCase {
             {"bool_value":true,"int_value":[10,20,30]}""", syntheticSource);
     }
 
+    public void testIndexStoredArraySourceSingleLeafElement() throws IOException {
+        DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(syntheticSourceMapping(b -> {
+            b.startObject("int_value").field("type", "integer").endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> b.array("int_value", new int[] { 10 }));
+        assertEquals("{\"int_value\":10}", syntheticSource);
+        ParsedDocument doc = documentMapper.parse(source(syntheticSource));
+        assertNull(doc.rootDoc().getField("_ignored_source"));
+    }
+
+    public void testIndexStoredArraySourceSingleLeafElementAndNull() throws IOException {
+        DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(syntheticSourceMapping(b -> {
+            b.startObject("value").field("type", "keyword").endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> b.array("value", new String[] { "foo", null }));
+        assertEquals("{\"value\":[\"foo\",null]}", syntheticSource);
+    }
+
+    public void testIndexStoredArraySourceSingleObjectElement() throws IOException {
+        DocumentMapper documentMapper = createMapperServiceWithStoredArraySource(syntheticSourceMapping(b -> {
+            b.startObject("path").startObject("properties");
+            {
+                b.startObject("int_value").field("type", "integer").endObject();
+            }
+            b.endObject().endObject();
+        })).documentMapper();
+        var syntheticSource = syntheticSource(documentMapper, b -> {
+            b.startArray("path").startObject().field("int_value", 10).endObject().endArray();
+        });
+        assertEquals("{\"path\":[{\"int_value\":10}]}", syntheticSource);
+    }
+
     public void testFieldStoredArraySourceRootValueArray() throws IOException {
         DocumentMapper documentMapper = createMapperService(syntheticSourceMapping(b -> {
             b.startObject("int_value").field("type", "integer").field(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, "arrays").endObject();
diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
index ca26779f3376..eef7fc4e5008 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
@@ -1580,7 +1580,11 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
         buildInput.accept(builder);
         builder.endObject();
         String expected = Strings.toString(builder);
-        assertThat(syntheticSource(mapperAll, buildInput), equalTo(expected));
+        String actual = syntheticSource(mapperAll, buildInput);
+        // Check for single-element array, the array source is not stored in this case.
+        if (expected.replace("[", "").replace("]", "").equals(actual) == false) {
+            assertThat(actual, equalTo(expected));
+        }
     }
 
     @Override

From b2903520d87e70c780f8d36348666da5bc5b35c5 Mon Sep 17 00:00:00 2001
From: elasticsearchmachine
 <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 9 Oct 2024 18:47:41 +1100
Subject: [PATCH 76/85] Mute
 org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests
 testInfer_StreamRequest #114385

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 0ee6b21cedca..ef27eeeffc14 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -374,6 +374,9 @@ tests:
 - class: org.elasticsearch.xpack.esql.qa.single_node.RestEsqlIT
   method: testProfileOrdinalsGroupingOperator {SYNC}
   issue: https://github.com/elastic/elasticsearch/issues/114380
+- class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests
+  method: testInfer_StreamRequest
+  issue: https://github.com/elastic/elasticsearch/issues/114385
 
 # Examples:
 #

From e221c0357eff9d311bd67b1a4b4f15e02b6cf030 Mon Sep 17 00:00:00 2001
From: Felix Barnsteiner 
Date: Wed, 9 Oct 2024 11:57:31 +0200
Subject: [PATCH 77/85] Add mappings for OTel event body (#114332)

Also changes mappings from body_* to body.*
---
 .../logs-otel@mappings.yaml                   | 28 ++++++---
 .../metrics-otel@mappings.yaml                |  2 +-
 .../component-templates/otel@mappings.yaml    |  6 +-
 .../semconv-resource-to-ecs@mappings.yaml     |  2 +-
 .../src/main/resources/resources.yaml         |  2 +-
 .../rest-api-spec/test/20_logs_tests.yml      | 61 ++++++++++++++++++-
 6 files changed, 85 insertions(+), 16 deletions(-)

diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml
index 107901adb834..5f4dcbd41672 100644
--- a/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml
+++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/logs-otel@mappings.yaml
@@ -14,8 +14,7 @@ template:
       attributes:
         type: passthrough
         dynamic: true
-        priority: 10
-        time_series_dimension: true
+        priority: 20
         properties:
           exception.type:
             type: keyword
@@ -40,13 +39,28 @@ template:
       log.level:
         type: alias
         path: severity_text
-      body_text:
-        type: match_only_text
+      body:
+        type: object
+        properties:
+          text:
+            type: match_only_text
+          flattened:
+            # this is used for complex bodies of regular log records
+            # using the flattened field type avoids mapping issues which can be caused by logs containing arbitrary JSON objects
+            # the tradeoff is that the flattened field type is currently not supported well by Kibana and has other limitations
+            type: flattened
+          structured:
+            # this is used for events
+            # events are also represented as log records
+            # the event.name attribute uniquely identifies event structure / type of the payload (body)
+            # see also https://github.com/open-telemetry/semantic-conventions/blob/main/docs/general/events.md
+            # this makes them less prone to mapping issues, which is why we're enabling dynamic mappings
+            type: passthrough
+            dynamic: true
+            priority: 10
       message:
         type: alias
-        path: body_text
-      body_structured:
-        type: flattened
+        path: body.text
       trace_id:
         type: keyword
       trace.id:
diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml
index 2f6aa7f6c916..37dd93b7f16d 100644
--- a/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml
+++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/metrics-otel@mappings.yaml
@@ -10,7 +10,7 @@ template:
       metrics:
         type: passthrough
         dynamic: true
-        priority: 1
+        priority: 10
       unit:
         type: keyword
         time_series_dimension: true
diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml
index fad85661203d..513e1a857787 100644
--- a/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml
+++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/otel@mappings.yaml
@@ -20,7 +20,7 @@ template:
       attributes:
         type: passthrough
         dynamic: true
-        priority: 10
+        priority: 20
         time_series_dimension: true
       dropped_attributes_count:
         type: long
@@ -39,7 +39,7 @@ template:
           attributes:
             type: passthrough
             dynamic: true
-            priority: 20
+            priority: 30
             time_series_dimension: true
       resource:
         properties:
@@ -51,7 +51,7 @@ template:
           attributes:
             type: passthrough
             dynamic: true
-            priority: 30
+            priority: 40
             time_series_dimension: true
     dynamic_templates:
       - complex_attributes:
diff --git a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml
index 87f97c7487be..6645e7d28252 100644
--- a/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml
+++ b/x-pack/plugin/otel-data/src/main/resources/component-templates/semconv-resource-to-ecs@mappings.yaml
@@ -11,7 +11,7 @@ template:
           attributes:
             type: passthrough
             dynamic: true
-            priority: 30
+            priority: 40
             time_series_dimension: true
             properties:
               host.name:
diff --git a/x-pack/plugin/otel-data/src/main/resources/resources.yaml b/x-pack/plugin/otel-data/src/main/resources/resources.yaml
index e32037901a49..52873287696a 100644
--- a/x-pack/plugin/otel-data/src/main/resources/resources.yaml
+++ b/x-pack/plugin/otel-data/src/main/resources/resources.yaml
@@ -1,7 +1,7 @@
 # "version" holds the version of the templates and ingest pipelines installed
 # by xpack-plugin otel-data. This must be increased whenever an existing template is
 # changed, in order for it to be updated on Elasticsearch upgrade.
-version: 4
+version: 5
 
 component-templates:
   - otel@mappings
diff --git a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml
index fc162d0647d0..0957a79552ad 100644
--- a/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml
+++ b/x-pack/plugin/otel-data/src/yamlRestTest/resources/rest-api-spec/test/20_logs_tests.yml
@@ -11,7 +11,7 @@ setup:
         refresh: true
         body:
           - create: {}
-          - '{"@timestamp":"2024-07-18T14:48:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "attributes": { "foo": "bar"}, "body_text":"Error: Unable to connect to the database.","severity_text":"ERROR","severity_number":3,"trace_id":"abc123xyz456def789ghi012jkl345"}'
+          - '{"@timestamp":"2024-07-18T14:48:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "attributes": { "foo": "bar"}, "body":{"text":"Error: Unable to connect to the database."},"severity_text":"ERROR","severity_number":3,"trace_id":"abc123xyz456def789ghi012jkl345"}'
   - is_false: errors
   - do:
       search:
@@ -39,7 +39,8 @@ setup:
             attributes:
               foo: [3, 2, 1]
               bar: [b, c, a]
-            body_text: "Error: Unable to connect to the database."
+            body:
+              text: "Error: Unable to connect to the database."
             severity_text: ERROR
   - is_false: errors
   - do:
@@ -78,7 +79,7 @@ setup:
         refresh: true
         body:
           - create: {}
-          - '{"@timestamp":"2024-07-18T14:49:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "body_text":"error1"}'
+          - '{"@timestamp":"2024-07-18T14:49:33.467654000Z","data_stream":{"dataset":"generic.otel","namespace":"default"}, "body": {"text":"error1"}}'
   - is_false: errors
   - do:
       indices.get_data_stream:
@@ -90,3 +91,57 @@ setup:
   - is_true: $datastream-backing-index
   - match: { .$datastream-backing-index.settings.index.sort.field.0: "resource.attributes.host.name" }
   - match: { .$datastream-backing-index.settings.index.sort.field.1: "@timestamp" }
+---
+Event body:
+  - do:
+      bulk:
+        index: logs-generic.otel-default
+        refresh: true
+        body:
+          - create: {}
+          - "@timestamp": 2024-07-18T14:48:33.467654000Z
+            resource:
+              attributes:
+                service.name: my-service
+            attributes:
+              event.name: foo
+            body:
+              structured:
+                foo:
+                  bar: baz
+  - is_false: errors
+  - do:
+      indices.get_data_stream:
+        name: logs-generic.otel-default
+  - set: { data_streams.0.indices.0.index_name: datastream-backing-index }
+  - do:
+      indices.get_mapping:
+        index: $datastream-backing-index
+  - is_true: $datastream-backing-index
+  - match: { .$datastream-backing-index.mappings.properties.body.properties.structured.properties.foo\.bar.type: "keyword" }
+---
+Structured log body:
+  - do:
+      bulk:
+        index: logs-generic.otel-default
+        refresh: true
+        body:
+          - create: {}
+          - "@timestamp": 2024-07-18T14:48:33.467654000Z
+            resource:
+              attributes:
+                service.name: my-service
+            body:
+              flattened:
+                foo:
+                  bar: baz
+  - is_false: errors
+  - do:
+      indices.get_data_stream:
+        name: logs-generic.otel-default
+  - set: { data_streams.0.indices.0.index_name: datastream-backing-index }
+  - do:
+      indices.get_mapping:
+        index: $datastream-backing-index
+  - is_true: $datastream-backing-index
+  - match: { .$datastream-backing-index.mappings.properties.body.properties.flattened.type: "flattened" }

From f3896233a4925d06e72b759ec85082854696e003 Mon Sep 17 00:00:00 2001
From: Nikolaj Volgushev 
Date: Wed, 9 Oct 2024 13:46:09 +0200
Subject: [PATCH 78/85] Revert "Fix BWC for file-settings based role mappings
 (#113900)" and related  (#114326)

Revert "Fix BWC for file-settings based role mappings (#113900)" and related changes. Reverted commits:

- 763764c7fac0d5738534e632d7da327711a272d0
- bc8f9dc7f3882a461d4b89d69c7554a4cb3858ac
- ce07060dce69f961c0906079529e91c7dd7d4b48

This is due to a bug in the above fix. We will reintroduce a pared-down version of the fix in a subsequent PR.
---
 docs/changelog/113900.yaml                    |   5 -
 .../rolemapping/GetRoleMappingsResponse.java  |   5 -
 .../RoleMappingFileSettingsIT.java            | 170 +++++-------------
 .../xpack/security/Security.java              |   3 +-
 .../role/TransportDeleteRoleAction.java       |  22 +--
 .../TransportDeleteRoleMappingAction.java     |  18 +-
 .../TransportGetRoleMappingsAction.java       |  41 +----
 .../TransportPutRoleMappingAction.java        |  28 +--
 .../mapper/ClusterStateRoleMapper.java        |  31 +---
 .../test/SecuritySettingsSource.java          |   2 +-
 .../role/TransportDeleteRoleActionTests.java  |  10 +-
 .../TransportGetRoleMappingsActionTests.java  |   8 +-
 .../TransportPutRoleMappingActionTests.java   |  30 +---
 .../mapper/ClusterStateRoleMapperTests.java   |  11 +-
 14 files changed, 71 insertions(+), 313 deletions(-)
 delete mode 100644 docs/changelog/113900.yaml

diff --git a/docs/changelog/113900.yaml b/docs/changelog/113900.yaml
deleted file mode 100644
index 25f833d25178..000000000000
--- a/docs/changelog/113900.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-pr: 113900
-summary: Fix BWC for file-settings based role mappings
-area: Authentication
-type: bug
-issues: []
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
index 4f18411ac3af..13a751829797 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsResponse.java
@@ -11,7 +11,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 
 import java.io.IOException;
-import java.util.Collection;
 
 /**
  * Response to {@link GetRoleMappingsAction get role-mappings API}.
@@ -22,10 +21,6 @@ public class GetRoleMappingsResponse extends ActionResponse {
 
     private final ExpressionRoleMapping[] mappings;
 
-    public GetRoleMappingsResponse(Collection mappings) {
-        this(mappings.toArray(new ExpressionRoleMapping[0]));
-    }
-
     public GetRoleMappingsResponse(ExpressionRoleMapping... mappings) {
         this.mappings = mappings;
     }
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
index 19c18bf855b4..38dd7116acce 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
@@ -34,7 +34,6 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsR
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder;
-import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
 import org.elasticsearch.xpack.core.security.authc.RealmConfig;
 import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
@@ -59,11 +58,12 @@ import java.util.function.Consumer;
 import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING;
 import static org.elasticsearch.xcontent.XContentType.JSON;
 import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7;
-import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX;
 import static org.hamcrest.Matchers.allOf;
+import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.emptyArray;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.notNullValue;
@@ -270,28 +270,21 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
             assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user"));
         }
 
-        // the role mappings are retrievable by the role mapping action for BWC
-        assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet");
-
-        // role mappings (with the same names) can be stored in the "native" store
-        {
-            PutRoleMappingResponse response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana"))
-                .actionGet();
-            assertTrue(response.isCreated());
-            response = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet();
-            assertTrue(response.isCreated());
-        }
-        {
-            // deleting role mappings that exist in the native store and in cluster-state should result in success
-            var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet();
-            assertTrue(response.isFound());
-            response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_fleet")).actionGet();
-            assertTrue(response.isFound());
-        }
+        // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings)
+        var request = new GetRoleMappingsRequest();
+        request.setNames("everyone_kibana", "everyone_fleet");
+        var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
+        assertFalse(response.hasMappings());
+        assertThat(response.mappings(), emptyArray());
 
+        // role mappings (with the same names) can also be stored in the "native" store
+        var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet();
+        assertTrue(putRoleMappingResponse.isCreated());
+        putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet();
+        assertTrue(putRoleMappingResponse.isCreated());
     }
 
-    public void testClusterStateRoleMappingsAddedThenDeleted() throws Exception {
+    public void testRoleMappingsApplied() throws Exception {
         ensureGreen();
 
         var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana");
@@ -300,12 +293,6 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
         assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2());
         logger.info("---> cleanup cluster settings...");
 
-        {
-            // Deleting non-existent native role mappings returns not found even if they exist in config file
-            var response = client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).get();
-            assertFalse(response.isFound());
-        }
-
         savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName());
 
         writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter);
@@ -320,15 +307,40 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
             clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey())
         );
 
-        // cluster-state role mapping was removed and is not returned in the API anymore
+        // native role mappings are not affected by the removal of the cluster-state based ones
         {
             var request = new GetRoleMappingsRequest();
             request.setNames("everyone_kibana", "everyone_fleet");
             var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
-            assertFalse(response.hasMappings());
+            assertTrue(response.hasMappings());
+            assertThat(
+                Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(),
+                containsInAnyOrder("everyone_kibana", "everyone_fleet")
+            );
         }
 
-        // no role mappings means no roles are resolved
+        // and roles are resolved based on the native role mappings
+        for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) {
+            PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>();
+            userRoleMapper.resolveRoles(
+                new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)),
+                resolveRolesFuture
+            );
+            assertThat(resolveRolesFuture.get(), contains("kibana_user_native"));
+        }
+
+        {
+            var request = new DeleteRoleMappingRequest();
+            request.setName("everyone_kibana");
+            var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get();
+            assertTrue(response.isFound());
+            request = new DeleteRoleMappingRequest();
+            request.setName("everyone_fleet");
+            response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get();
+            assertTrue(response.isFound());
+        }
+
+        // no roles are resolved now, because both native and cluster-state based stores have been cleared
         for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) {
             PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>();
             userRoleMapper.resolveRoles(
@@ -339,78 +351,6 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
         }
     }
 
-    public void testGetRoleMappings() throws Exception {
-        ensureGreen();
-
-        final List nativeMappings = List.of("everyone_kibana", "_everyone_kibana", "zzz_mapping", "123_mapping");
-        for (var mapping : nativeMappings) {
-            client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest(mapping)).actionGet();
-        }
-
-        var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana");
-        writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter);
-        boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS);
-        assertTrue(awaitSuccessful);
-
-        var request = new GetRoleMappingsRequest();
-        var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
-        assertTrue(response.hasMappings());
-        assertThat(
-            Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(),
-            containsInAnyOrder(
-                "everyone_kibana",
-                "everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX,
-                "_everyone_kibana",
-                "everyone_fleet" + RESERVED_ROLE_MAPPING_SUFFIX,
-                "zzz_mapping",
-                "123_mapping"
-            )
-        );
-
-        int readOnlyCount = 0;
-        // assert that cluster-state role mappings come last
-        for (ExpressionRoleMapping mapping : response.mappings()) {
-            readOnlyCount = mapping.getName().endsWith(RESERVED_ROLE_MAPPING_SUFFIX) ? readOnlyCount + 1 : readOnlyCount;
-        }
-        // Two sourced from cluster-state
-        assertEquals(readOnlyCount, 2);
-
-        // it's possible to delete overlapping native role mapping
-        assertTrue(client().execute(DeleteRoleMappingAction.INSTANCE, deleteRequest("everyone_kibana")).actionGet().isFound());
-
-        // Fetch a specific file based role
-        request = new GetRoleMappingsRequest();
-        request.setNames("everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX);
-        response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
-        assertTrue(response.hasMappings());
-        assertThat(
-            Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(),
-            containsInAnyOrder("everyone_kibana" + RESERVED_ROLE_MAPPING_SUFFIX)
-        );
-
-        savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName());
-        writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter);
-        awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS);
-        assertTrue(awaitSuccessful);
-
-        final ClusterStateResponse clusterStateResponse = clusterAdmin().state(
-            new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(savedClusterState.v2().get())
-        ).get();
-
-        assertNull(
-            clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey())
-        );
-
-        // Make sure remaining native mappings can still be fetched
-        request = new GetRoleMappingsRequest();
-        response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
-        assertTrue(response.hasMappings());
-        assertThat(
-            Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(),
-            containsInAnyOrder("_everyone_kibana", "zzz_mapping", "123_mapping")
-        );
-    }
-
     public static Tuple setupClusterStateListenerForError(
         ClusterService clusterService,
         Consumer errorMetadataConsumer
@@ -493,8 +433,11 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
             boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS);
             assertTrue(awaitSuccessful);
 
-            // even if index is closed, cluster-state role mappings are still returned
-            assertGetResponseHasMappings(true, "everyone_kibana", "everyone_fleet");
+            // no native role mappings exist
+            var request = new GetRoleMappingsRequest();
+            request.setNames("everyone_kibana", "everyone_fleet");
+            var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
+            assertFalse(response.hasMappings());
 
             // cluster state settings are also applied
             var clusterStateResponse = clusterAdmin().state(
@@ -533,12 +476,6 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
         }
     }
 
-    private DeleteRoleMappingRequest deleteRequest(String name) {
-        var request = new DeleteRoleMappingRequest();
-        request.setName(name);
-        return request;
-    }
-
     private PutRoleMappingRequest sampleRestRequest(String name) throws Exception {
         var json = """
             {
@@ -557,17 +494,4 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
             return new PutRoleMappingRequestBuilder(null).source(name, parser).request();
         }
     }
-
-    private static void assertGetResponseHasMappings(boolean readOnly, String... mappings) throws InterruptedException, ExecutionException {
-        var request = new GetRoleMappingsRequest();
-        request.setNames(mappings);
-        var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get();
-        assertTrue(response.hasMappings());
-        assertThat(
-            Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(),
-            containsInAnyOrder(
-                Arrays.stream(mappings).map(mapping -> mapping + (readOnly ? RESERVED_ROLE_MAPPING_SUFFIX : "")).toArray(String[]::new)
-            )
-        );
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index f4d9360d1ed8..79a00fa1293b 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -897,8 +897,7 @@ public class Security extends Plugin
             reservedRealm
         );
         components.add(nativeUsersStore);
-        components.add(clusterStateRoleMapper);
-        components.add(nativeRoleMappingStore);
+        components.add(new PluginComponentBinding<>(NativeRoleMappingStore.class, nativeRoleMappingStore));
         components.add(new PluginComponentBinding<>(UserRoleMapper.class, userRoleMapper));
         components.add(reservedRealm);
         components.add(realms);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
index 569cdc1a79fd..e8d248233415 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java
@@ -10,7 +10,6 @@ import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.TransportAction;
-import org.elasticsearch.common.logging.HeaderWarning;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.tasks.Task;
@@ -18,7 +17,6 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction;
 import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest;
 import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker;
 import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
 
@@ -27,20 +25,16 @@ public class TransportDeleteRoleAction extends TransportAction {
-                if (clusterStateRoleMapper.hasMapping(request.name())) {
-                    // Allow to delete a mapping with the same name in the native role mapping store as the file_settings namespace, but
-                    // add a warning header to signal to the caller that this could be a problem.
-                    HeaderWarning.addWarning(
-                        "A read only role mapping with the same name ["
-                            + request.name()
-                            + "] has been previously been defined in a configuration file. "
-                            + "The read only role mapping will still be active."
-                    );
-                }
-                return new DeleteRoleResponse(found);
-            }));
+            rolesStore.deleteRole(request, listener.safeMap(DeleteRoleResponse::new));
         } catch (Exception e) {
             logger.error((Supplier) () -> "failed to delete role [" + request.name() + "]", e);
             listener.onFailure(e);
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
index 467cc1c8a902..74129facae70 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java
@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.security.action.rolemapping;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.common.logging.HeaderWarning;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.tasks.Task;
@@ -17,20 +16,17 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction;
 import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 
 public class TransportDeleteRoleMappingAction extends HandledTransportAction {
 
     private final NativeRoleMappingStore roleMappingStore;
-    private final ClusterStateRoleMapper clusterStateRoleMapper;
 
     @Inject
     public TransportDeleteRoleMappingAction(
         ActionFilters actionFilters,
         TransportService transportService,
-        NativeRoleMappingStore roleMappingStore,
-        ClusterStateRoleMapper clusterStateRoleMapper
+        NativeRoleMappingStore roleMappingStore
     ) {
         super(
             DeleteRoleMappingAction.NAME,
@@ -40,22 +36,10 @@ public class TransportDeleteRoleMappingAction extends HandledTransportAction listener) {
-        if (clusterStateRoleMapper.hasMapping(request.getName())) {
-            // Since it's allowed to add a mapping with the same name in the native role mapping store as the file_settings namespace,
-            // a warning header is added to signal to the caller that this could be a problem.
-            HeaderWarning.addWarning(
-                "A read only role mapping with the same name ["
-                    + request.getName()
-                    + "] has been previously been defined in a configuration file. The role mapping ["
-                    + request.getName()
-                    + "] defined in the configuration file is read only, will not be deleted, and will remain active."
-            );
-        }
         roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new));
     }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
index db0ee01af70e..ac0d3177cca0 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java
@@ -17,30 +17,21 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsA
 import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 
 import java.util.Arrays;
-import java.util.Comparator;
 import java.util.HashSet;
-import java.util.List;
 import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX;
 
 public class TransportGetRoleMappingsAction extends HandledTransportAction {
 
     private final NativeRoleMappingStore roleMappingStore;
-    private final ClusterStateRoleMapper clusterStateRoleMapper;
 
     @Inject
     public TransportGetRoleMappingsAction(
         ActionFilters actionFilters,
         TransportService transportService,
-        NativeRoleMappingStore nativeRoleMappingStore,
-        ClusterStateRoleMapper clusterStateRoleMapper
+        NativeRoleMappingStore nativeRoleMappingStore
     ) {
         super(
             GetRoleMappingsAction.NAME,
@@ -50,7 +41,6 @@ public class TransportGetRoleMappingsAction extends HandledTransportAction(Arrays.asList(request.getNames()));
         }
-        roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> {
-            List combinedRoleMappings = Stream.concat(
-                mappings.stream(),
-                clusterStateRoleMapper.getMappings(names == null ? null : names.stream().map(name -> {
-                    // If a read-only role is fetched by name including suffix, remove suffix
-                    return name.endsWith(RESERVED_ROLE_MAPPING_SUFFIX)
-                        ? name.substring(0, name.length() - RESERVED_ROLE_MAPPING_SUFFIX.length())
-                        : name;
-                }).collect(Collectors.toSet()))
-                    .stream()
-                    .map(this::cloneAndMarkAsReadOnly)
-                    .sorted(Comparator.comparing(ExpressionRoleMapping::getName))
-            ).toList();
-            listener.onResponse(new GetRoleMappingsResponse(combinedRoleMappings));
+        this.roleMappingStore.getRoleMappings(names, ActionListener.wrap(mappings -> {
+            ExpressionRoleMapping[] array = mappings.toArray(new ExpressionRoleMapping[mappings.size()]);
+            listener.onResponse(new GetRoleMappingsResponse(array));
         }, listener::onFailure));
     }
-
-    private ExpressionRoleMapping cloneAndMarkAsReadOnly(ExpressionRoleMapping mapping) {
-        // Mark role mappings from cluster state as "read only" by adding a suffix to their name
-        return new ExpressionRoleMapping(
-            mapping.getName() + RESERVED_ROLE_MAPPING_SUFFIX,
-            mapping.getExpression(),
-            mapping.getRoles(),
-            mapping.getRoleTemplates(),
-            mapping.getMetadata(),
-            mapping.isEnabled()
-        );
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
index 76f520bed517..82a3b4f00006 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java
@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.security.action.rolemapping;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.HandledTransportAction;
-import org.elasticsearch.common.logging.HeaderWarning;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.injection.guice.Inject;
 import org.elasticsearch.tasks.Task;
@@ -17,52 +16,27 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 
-import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX;
-
 public class TransportPutRoleMappingAction extends HandledTransportAction {
 
     private final NativeRoleMappingStore roleMappingStore;
-    private final ClusterStateRoleMapper clusterStateRoleMapper;
 
     @Inject
     public TransportPutRoleMappingAction(
         ActionFilters actionFilters,
         TransportService transportService,
-        NativeRoleMappingStore roleMappingStore,
-        ClusterStateRoleMapper clusterStateRoleMapper
+        NativeRoleMappingStore roleMappingStore
     ) {
         super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE);
         this.roleMappingStore = roleMappingStore;
-        this.clusterStateRoleMapper = clusterStateRoleMapper;
     }
 
     @Override
     protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) {
-        validateMappingName(request.getName());
-        if (clusterStateRoleMapper.hasMapping(request.getName())) {
-            // Allow to define a mapping with the same name in the native role mapping store as the file_settings namespace, but add a
-            // warning header to signal to the caller that this could be a problem.
-            HeaderWarning.addWarning(
-                "A read only role mapping with the same name ["
-                    + request.getName()
-                    + "] has been previously been defined in a configuration file. "
-                    + "Both role mappings will be used to determine role assignments."
-            );
-        }
         roleMappingStore.putRoleMapping(
             request,
             ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure)
         );
     }
-
-    private static void validateMappingName(String mappingName) {
-        if (mappingName.endsWith(RESERVED_ROLE_MAPPING_SUFFIX)) {
-            throw new IllegalArgumentException(
-                "Invalid mapping name [" + mappingName + "]. [" + RESERVED_ROLE_MAPPING_SUFFIX + "] is not an allowed suffix"
-            );
-        }
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
index baea5970b463..9a6e9e75c468 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
@@ -14,16 +14,13 @@ import org.elasticsearch.cluster.ClusterChangedEvent;
 import org.elasticsearch.cluster.ClusterStateListener;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.core.Nullable;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata;
 
-import java.util.Arrays;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;
 
 import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityComponents;
 
@@ -31,7 +28,8 @@ import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityCo
  * A role mapper the reads the role mapping rules (i.e. {@link ExpressionRoleMapping}s) from the cluster state
  * (i.e. {@link RoleMappingMetadata}). This is not enabled by default.
  */
-public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener {
+public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener {
+
     /**
      * This setting is never registered by the xpack security plugin - in order to enable the
      * cluster-state based role mapper another plugin must register it as a boolean setting
@@ -47,7 +45,6 @@ public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache im
      * 
      */
     public static final String CLUSTER_STATE_ROLE_MAPPINGS_ENABLED = "xpack.security.authc.cluster_state_role_mappings.enabled";
-    public static final String RESERVED_ROLE_MAPPING_SUFFIX = "-read-only-operator-config";
     private static final Logger logger = LogManager.getLogger(ClusterStateRoleMapper.class);
 
     private final ScriptService scriptService;
@@ -57,8 +54,8 @@ public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache im
     public ClusterStateRoleMapper(Settings settings, ScriptService scriptService, ClusterService clusterService) {
         this.scriptService = scriptService;
         this.clusterService = clusterService;
-        // this role mapper is enabled by default and only code in other plugins can disable it
-        this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, true);
+        // this role mapper is disabled by default and only code in other plugins can enable it
+        this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, false);
         if (this.enabled) {
             clusterService.addListener(this);
         }
@@ -84,30 +81,12 @@ public class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache im
         }
     }
 
-    public boolean hasMapping(String name) {
-        return getMappings().stream().map(ExpressionRoleMapping::getName).anyMatch(name::equals);
-    }
-
-    public Set getMappings(@Nullable Set names) {
-        if (enabled == false) {
-            return Set.of();
-        }
-        final Set mappings = getMappings();
-        if (names == null || names.isEmpty()) {
-            return mappings;
-        }
-        return mappings.stream().filter(it -> names.contains(it.getName())).collect(Collectors.toSet());
-    }
-
     private Set getMappings() {
         if (enabled == false) {
             return Set.of();
         } else {
             final Set mappings = RoleMappingMetadata.getFromClusterState(clusterService.state()).getRoleMappings();
-            logger.trace(
-                "Retrieved mapping(s) {} from cluster state",
-                Arrays.toString(mappings.stream().map(ExpressionRoleMapping::getName).toArray(String[]::new))
-            );
+            logger.trace("Retrieved [{}] mapping(s) from cluster state", mappings.size());
             return mappings;
         }
     }
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
index ce5aaacdb92b..6d7817db8ec0 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
@@ -403,7 +403,7 @@ public class SecuritySettingsSource extends NodeConfigurationSource {
         );
         public static final Setting CLUSTER_STATE_ROLE_MAPPINGS_ENABLED = Setting.boolSetting(
             "xpack.security.authc.cluster_state_role_mappings.enabled",
-            true,
+            false,
             Setting.Property.NodeScope
         );
         public static final Setting NATIVE_ROLES_ENABLED = Setting.boolSetting(
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
index d647088017dc..84e4dc402c76 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java
@@ -19,7 +19,6 @@ import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest;
 import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse;
 import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper;
 import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authz.ReservedRoleNameChecker;
 import org.elasticsearch.xpack.security.authz.store.NativeRolesStore;
 import org.junit.BeforeClass;
@@ -67,8 +66,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
             mock(ActionFilters.class),
             rolesStore,
             transportService,
-            new ReservedRoleNameChecker.Default(),
-            mock(ClusterStateRoleMapper.class)
+            new ReservedRoleNameChecker.Default()
         );
 
         DeleteRoleRequest request = new DeleteRoleRequest();
@@ -117,8 +115,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
             mock(ActionFilters.class),
             rolesStore,
             transportService,
-            new ReservedRoleNameChecker.Default(),
-            mock(ClusterStateRoleMapper.class)
+            new ReservedRoleNameChecker.Default()
         );
 
         DeleteRoleRequest request = new DeleteRoleRequest();
@@ -171,8 +168,7 @@ public class TransportDeleteRoleActionTests extends ESTestCase {
             mock(ActionFilters.class),
             rolesStore,
             transportService,
-            new ReservedRoleNameChecker.Default(),
-            mock(ClusterStateRoleMapper.class)
+            new ReservedRoleNameChecker.Default()
         );
 
         DeleteRoleRequest request = new DeleteRoleRequest();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java
index 799e0c334172..6e8698f095d3 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java
@@ -19,7 +19,6 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest;
 import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 import org.hamcrest.Matchers;
 import org.junit.Before;
@@ -35,16 +34,13 @@ import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anySet;
 import static org.mockito.ArgumentMatchers.nullable;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class TransportGetRoleMappingsActionTests extends ESTestCase {
 
     private NativeRoleMappingStore store;
-    private ClusterStateRoleMapper clusterStateRoleMapper;
     private TransportGetRoleMappingsAction action;
     private AtomicReference> namesRef;
     private List result;
@@ -53,8 +49,6 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase {
     @Before
     public void setupMocks() {
         store = mock(NativeRoleMappingStore.class);
-        clusterStateRoleMapper = mock(ClusterStateRoleMapper.class);
-        when(clusterStateRoleMapper.getMappings(anySet())).thenReturn(Set.of());
         TransportService transportService = new TransportService(
             Settings.EMPTY,
             mock(Transport.class),
@@ -64,7 +58,7 @@ public class TransportGetRoleMappingsActionTests extends ESTestCase {
             null,
             Collections.emptySet()
         );
-        action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper);
+        action = new TransportGetRoleMappingsAction(mock(ActionFilters.class), transportService, store);
 
         namesRef = new AtomicReference<>(null);
         result = Collections.emptyList();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
index 0bb3e7dd4ac3..6f789a10a3a6 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java
@@ -19,32 +19,26 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRe
 import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
 import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression;
-import org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper;
 import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore;
 import org.junit.Before;
 
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Map;
-import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
-import static org.elasticsearch.xpack.security.authc.support.mapper.ClusterStateRoleMapper.RESERVED_ROLE_MAPPING_SUFFIX;
 import static org.hamcrest.Matchers.aMapWithSize;
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.iterableWithSize;
 import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anySet;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class TransportPutRoleMappingActionTests extends ESTestCase {
 
     private NativeRoleMappingStore store;
-    private ClusterStateRoleMapper clusterStateRoleMapper;
     private TransportPutRoleMappingAction action;
     private AtomicReference requestRef;
 
@@ -52,9 +46,6 @@ public class TransportPutRoleMappingActionTests extends ESTestCase {
     @Before
     public void setupMocks() {
         store = mock(NativeRoleMappingStore.class);
-        clusterStateRoleMapper = mock(ClusterStateRoleMapper.class);
-        when(clusterStateRoleMapper.getMappings(anySet())).thenReturn(Set.of());
-        when(clusterStateRoleMapper.hasMapping(any())).thenReturn(false);
         TransportService transportService = new TransportService(
             Settings.EMPTY,
             mock(Transport.class),
@@ -64,7 +55,7 @@ public class TransportPutRoleMappingActionTests extends ESTestCase {
             null,
             Collections.emptySet()
         );
-        action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store, clusterStateRoleMapper);
+        action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store);
 
         requestRef = new AtomicReference<>(null);
 
@@ -94,25 +85,6 @@ public class TransportPutRoleMappingActionTests extends ESTestCase {
         assertThat(mapping.getMetadata().get("dumb"), equalTo(true));
     }
 
-    public void testPutMappingWithInvalidName() {
-        final FieldExpression expression = new FieldExpression("username", Collections.singletonList(new FieldExpression.FieldValue("*")));
-        IllegalArgumentException illegalArgumentException = expectThrows(
-            IllegalArgumentException.class,
-            () -> put("anarchy" + RESERVED_ROLE_MAPPING_SUFFIX, expression, "superuser", Collections.singletonMap("dumb", true))
-        );
-
-        assertThat(
-            illegalArgumentException.getMessage(),
-            equalTo(
-                "Invalid mapping name [anarchy"
-                    + RESERVED_ROLE_MAPPING_SUFFIX
-                    + "]. ["
-                    + RESERVED_ROLE_MAPPING_SUFFIX
-                    + "] is not an allowed suffix"
-            )
-        );
-    }
-
     private PutRoleMappingResponse put(String name, FieldExpression expression, String role, Map metadata)
         throws Exception {
         final PutRoleMappingRequest request = new PutRoleMappingRequest();
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java
index 063245e00447..74221c113327 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java
@@ -56,12 +56,12 @@ public class ClusterStateRoleMapperTests extends ESTestCase {
             () -> 1L
         );
         clusterService = mock(ClusterService.class);
-        disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build();
+        enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build();
         if (randomBoolean()) {
-            enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build();
+            disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build();
         } else {
-            // the cluster state role mapper is enabled by default
-            enabledSettings = Settings.EMPTY;
+            // the cluster state role mapper is disabled by default
+            disabledSettings = Settings.EMPTY;
         }
     }
 
@@ -95,9 +95,6 @@ public class ClusterStateRoleMapperTests extends ESTestCase {
             verify(mapping1).isEnabled();
             verify(mapping2).isEnabled();
             verify(mapping3).isEnabled();
-            verify(mapping1).getName();
-            verify(mapping2).getName();
-            verify(mapping3).getName();
             verify(mapping2).getExpression();
             verify(mapping3).getExpression();
             verify(mapping3).getRoleNames(same(scriptService), same(expressionModel));

From e97aaa8c419129cc573e4d4c710d9ba858a4a3ef Mon Sep 17 00:00:00 2001
From: Johannes Mahne 
Date: Wed, 9 Oct 2024 13:47:24 +0200
Subject: [PATCH 79/85] Update forcemerge.asciidoc (#114377)

As requested in https://github.com/elastic/elasticsearch/pull/114315#issuecomment-2400521895, this PR applies the change to the main branch.
---
 docs/reference/indices/forcemerge.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/indices/forcemerge.asciidoc b/docs/reference/indices/forcemerge.asciidoc
index 6eacaac5e7b2..e1581a3cfa63 100644
--- a/docs/reference/indices/forcemerge.asciidoc
+++ b/docs/reference/indices/forcemerge.asciidoc
@@ -56,7 +56,7 @@ documents can't be backed up incrementally.
 ===== Blocks during a force merge
 
 Calls to this API block until the merge is complete (unless request contains
-wait_for_completion=false, which is default true). If the client connection
+`wait_for_completion=false`, which is default `true`). If the client connection
 is lost before completion then the force merge process will continue in the
 background. Any new requests to force merge the same indices will also block
 until the ongoing force merge is complete.

From f6bf506584fb8d68d655768a9075bb32fe24daae Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Wed, 9 Oct 2024 14:57:09 +0300
Subject: [PATCH 80/85] Avoid noisy errors in testSyntheticSourceKeepArrays
 (#114391)

* Minimize storing array source

* restrict to fields

* revert changes for `addIgnoredFieldFromContext`

* fix test

* spotless

* count nulls

* Avoid noisy errors in testSyntheticSourceKeepArrays

* update

* update

* update

* update
---
 .../org/elasticsearch/index/mapper/MapperTestCase.java     | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
index eef7fc4e5008..d8298d49d3eb 100644
--- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
@@ -1570,7 +1570,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
             b.endObject();
         }));
 
-        int elementCount = randomIntBetween(1, 5);
+        int elementCount = randomIntBetween(2, 5);
         CheckedConsumer buildInput = (XContentBuilder builder) -> {
             example.buildInputArray(builder, elementCount);
         };
@@ -1581,10 +1581,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
         builder.endObject();
         String expected = Strings.toString(builder);
         String actual = syntheticSource(mapperAll, buildInput);
-        // Check for single-element array, the array source is not stored in this case.
-        if (expected.replace("[", "").replace("]", "").equals(actual) == false) {
-            assertThat(actual, equalTo(expected));
-        }
+        assertThat(actual, equalTo(expected));
     }
 
     @Override

From cd0f9a4f3774c1254877f819c65c493e6cafe06a Mon Sep 17 00:00:00 2001
From: elasticsearchmachine
 <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Wed, 9 Oct 2024 23:31:04 +1100
Subject: [PATCH 81/85] Mute
 org.elasticsearch.index.mapper.extras.ScaledFloatFieldMapperTests
 testSyntheticSourceKeepArrays #114406

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index ef27eeeffc14..22847e754ab9 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -377,6 +377,9 @@ tests:
 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests
   method: testInfer_StreamRequest
   issue: https://github.com/elastic/elasticsearch/issues/114385
+- class: org.elasticsearch.index.mapper.extras.ScaledFloatFieldMapperTests
+  method: testSyntheticSourceKeepArrays
+  issue: https://github.com/elastic/elasticsearch/issues/114406
 
 # Examples:
 #

From 3953331ac534006f3c4d56460a1c0a18d5efa492 Mon Sep 17 00:00:00 2001
From: Patrick Doyle <810052+prdoyle@users.noreply.github.com>
Date: Wed, 9 Oct 2024 09:01:27 -0400
Subject: [PATCH 82/85] Entitlements for System.exit (#114015)

* Entitlements for System.exit

* Respond to Simon's comments

* Rename trampoline -> bridge

* Require exactly one bridge jar

* Use Type helpers to generate descriptor strings

* Various cleanup from PR comments

* Remove null "receiver" for static methods

* Use List instead of voidDescriptor

* Clarifying comment

* Whoops, getMethod

* SuppressForbidden System.exit

* Spotless

* Use embedded provider plugin to keep ASM off classpath

* Oops... forgot the punchline

* Move ASM license to impl

* Use ProviderLocator and simplify bridgeJar logic

* Avoid eager resolution of configurations during task configuration

* Remove compile-time dependency agent->bridge

---------

Co-authored-by: Mark Vieira 
---
 .../tools/entitlement-agent/README.md         |   2 +-
 .../tools/entitlement-agent/build.gradle      |  31 ++-
 .../tools/entitlement-agent/impl/build.gradle |  20 ++
 .../impl/licenses/asm-LICENSE.txt             |  26 +++
 .../impl/licenses/asm-NOTICE.txt              |   1 +
 .../impl/src/main/java/module-info.java       |  18 ++
 .../impl/InstrumentationServiceImpl.java      |  41 ++++
 .../impl/InstrumenterImpl.java                | 214 ++++++++++++++++++
 ...ent.instrumentation.InstrumentationService |  10 +
 .../src/main/java/module-info.java            |   8 +-
 .../entitlement/agent/EntitlementAgent.java   |  44 +++-
 .../entitlement/agent/Transformer.java        |  49 ++++
 .../InstrumentationService.java               |  25 ++
 .../instrumentation/Instrumenter.java         |  14 ++
 .../instrumentation/MethodKey.java            |  18 ++
 .../agent/EntitlementAgentTests.java          |  31 ++-
 .../tools/entitlement-bridge/README.md        |  11 +
 .../tools/entitlement-bridge/build.gradle     |  18 ++
 .../src/main/java/module-info.java            |   6 +-
 .../entitlement/api/EntitlementChecks.java    |  14 ++
 .../entitlement/api/EntitlementProvider.java  |  34 +++
 .../tools}/entitlement-runtime/README.md      |   7 -
 .../tools}/entitlement-runtime/build.gradle   |   4 +-
 .../src/main/java/module-info.java            |  19 ++
 .../api/ElasticsearchEntitlementManager.java  |  77 +++++++
 .../runtime/api/NotEntitledException.java     |  12 +-
 .../internals/EntitlementInternals.java       |  24 ++
 ...icsearch.entitlement.api.EntitlementChecks |  10 +
 libs/core/src/main/java/module-info.java      |   6 +-
 settings.gradle                               |   3 +
 30 files changed, 764 insertions(+), 33 deletions(-)
 create mode 100644 distribution/tools/entitlement-agent/impl/build.gradle
 create mode 100644 distribution/tools/entitlement-agent/impl/licenses/asm-LICENSE.txt
 create mode 100644 distribution/tools/entitlement-agent/impl/licenses/asm-NOTICE.txt
 create mode 100644 distribution/tools/entitlement-agent/impl/src/main/java/module-info.java
 create mode 100644 distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
 create mode 100644 distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java
 create mode 100644 distribution/tools/entitlement-agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService
 create mode 100644 distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java
 create mode 100644 distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
 create mode 100644 distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java
 create mode 100644 distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
 create mode 100644 distribution/tools/entitlement-bridge/README.md
 create mode 100644 distribution/tools/entitlement-bridge/build.gradle
 rename {libs/entitlement-runtime => distribution/tools/entitlement-bridge}/src/main/java/module-info.java (72%)
 create mode 100644 distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java
 create mode 100644 distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java
 rename {libs => distribution/tools}/entitlement-runtime/README.md (51%)
 rename {libs => distribution/tools}/entitlement-runtime/build.gradle (76%)
 create mode 100644 distribution/tools/entitlement-runtime/src/main/java/module-info.java
 create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java
 rename libs/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/EntitlementChecks.java => distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/NotEntitledException.java (69%)
 create mode 100644 distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/internals/EntitlementInternals.java
 create mode 100644 distribution/tools/entitlement-runtime/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks

diff --git a/distribution/tools/entitlement-agent/README.md b/distribution/tools/entitlement-agent/README.md
index ff40651706a7..f8224f4266fb 100644
--- a/distribution/tools/entitlement-agent/README.md
+++ b/distribution/tools/entitlement-agent/README.md
@@ -1,6 +1,6 @@
 ### Entitlement Agent
 
-This is a java agent that instruments sensitive class library methods with calls into the `entitlement-runtime` module to check for permissions granted under the _entitlements_ system.
+This is a java agent that instruments sensitive class library methods with calls into the `entitlement-bridge` module to check for permissions granted under the _entitlements_ system.
 
 The entitlements system provides an alternative to the legacy `SecurityManager` system, which is deprecated for removal.
 With this agent, the Elasticsearch server can retain some control over which class library methods can be invoked by which callers.
diff --git a/distribution/tools/entitlement-agent/build.gradle b/distribution/tools/entitlement-agent/build.gradle
index 56e2ffac53fd..3fa9d0f5ef83 100644
--- a/distribution/tools/entitlement-agent/build.gradle
+++ b/distribution/tools/entitlement-agent/build.gradle
@@ -7,21 +7,44 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
+import static java.util.stream.Collectors.joining
+
 apply plugin: 'elasticsearch.build'
+apply plugin: 'elasticsearch.embedded-providers'
+
+embeddedProviders {
+  impl 'entitlement-agent', project(':distribution:tools:entitlement-agent:impl')
+}
 
 configurations {
-  entitlementRuntime
+  entitlementBridge
 }
 
 dependencies {
-  entitlementRuntime project(":libs:elasticsearch-entitlement-runtime")
-  implementation project(":libs:elasticsearch-entitlement-runtime")
+  entitlementBridge project(":distribution:tools:entitlement-bridge")
+  compileOnly project(":libs:elasticsearch-core")
+  compileOnly project(":distribution:tools:entitlement-runtime")
   testImplementation project(":test:framework")
+  testImplementation project(":distribution:tools:entitlement-bridge")
+  testImplementation project(":distribution:tools:entitlement-agent:impl")
 }
 
 tasks.named('test').configure {
+  systemProperty "tests.security.manager", "false"
   dependsOn('jar')
-  jvmArgs "-javaagent:${ tasks.named('jar').flatMap{ it.archiveFile }.get()}"
+
+  // Register an argument provider to avoid eager resolution of configurations
+  jvmArgumentProviders.add(new CommandLineArgumentProvider() {
+    @Override
+    Iterable asArguments() {
+      return ["-javaagent:${tasks.jar.archiveFile.get()}", "-Des.entitlements.bridgeJar=${configurations.entitlementBridge.singleFile}"]
+    }
+  })
+
+
+  // The Elasticsearch build plugin automatically adds all compileOnly deps as testImplementation.
+  // We must not add the bridge this way because it is also on the boot classpath, and that would lead to jar hell.
+  classpath -= files(configurations.entitlementBridge)
 }
 
 tasks.named('jar').configure {
diff --git a/distribution/tools/entitlement-agent/impl/build.gradle b/distribution/tools/entitlement-agent/impl/build.gradle
new file mode 100644
index 000000000000..f73e21505d48
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/build.gradle
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+apply plugin: 'elasticsearch.build'
+
+dependencies {
+  compileOnly project(':distribution:tools:entitlement-agent')
+  implementation 'org.ow2.asm:asm:9.7'
+}
+
+tasks.named('forbiddenApisMain').configure {
+  replaceSignatureFiles 'jdk-signatures'
+}
+
diff --git a/distribution/tools/entitlement-agent/impl/licenses/asm-LICENSE.txt b/distribution/tools/entitlement-agent/impl/licenses/asm-LICENSE.txt
new file mode 100644
index 000000000000..afb064f2f266
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/licenses/asm-LICENSE.txt
@@ -0,0 +1,26 @@
+Copyright (c) 2012 France Télécom
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of the copyright holders nor the names of its
+   contributors may be used to endorse or promote products derived from
+   this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/distribution/tools/entitlement-agent/impl/licenses/asm-NOTICE.txt b/distribution/tools/entitlement-agent/impl/licenses/asm-NOTICE.txt
new file mode 100644
index 000000000000..8d1c8b69c3fc
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/licenses/asm-NOTICE.txt
@@ -0,0 +1 @@
+ 
diff --git a/distribution/tools/entitlement-agent/impl/src/main/java/module-info.java b/distribution/tools/entitlement-agent/impl/src/main/java/module-info.java
new file mode 100644
index 000000000000..f47345ddfaee
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/src/main/java/module-info.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+import org.elasticsearch.entitlement.instrumentation.impl.InstrumentationServiceImpl;
+
+module org.elasticsearch.entitlement.agent.impl {
+    requires org.objectweb.asm;
+    requires org.elasticsearch.entitlement.agent;
+
+    provides InstrumentationService with InstrumentationServiceImpl;
+}
diff --git a/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
new file mode 100644
index 000000000000..f5fe8d41c224
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation.impl;
+
+import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+import org.elasticsearch.entitlement.instrumentation.Instrumenter;
+import org.elasticsearch.entitlement.instrumentation.MethodKey;
+import org.objectweb.asm.Type;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.Map;
+import java.util.stream.Stream;
+
+public class InstrumentationServiceImpl implements InstrumentationService {
+    @Override
+    public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) {
+        return new InstrumenterImpl(classNameSuffix, instrumentationMethods);
+    }
+
+    /**
+     * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline
+     */
+    public MethodKey methodKeyForTarget(Method targetMethod) {
+        Type actualType = Type.getMethodType(Type.getMethodDescriptor(targetMethod));
+        return new MethodKey(
+            Type.getInternalName(targetMethod.getDeclaringClass()),
+            targetMethod.getName(),
+            Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList(),
+            Modifier.isStatic(targetMethod.getModifiers())
+        );
+    }
+
+}
diff --git a/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java
new file mode 100644
index 000000000000..81c120ddcd6d
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java
@@ -0,0 +1,214 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation.impl;
+
+import org.elasticsearch.entitlement.instrumentation.Instrumenter;
+import org.elasticsearch.entitlement.instrumentation.MethodKey;
+import org.objectweb.asm.AnnotationVisitor;
+import org.objectweb.asm.ClassReader;
+import org.objectweb.asm.ClassVisitor;
+import org.objectweb.asm.ClassWriter;
+import org.objectweb.asm.MethodVisitor;
+import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.Method;
+import java.util.Map;
+import java.util.stream.Stream;
+
+import static org.objectweb.asm.ClassWriter.COMPUTE_FRAMES;
+import static org.objectweb.asm.ClassWriter.COMPUTE_MAXS;
+import static org.objectweb.asm.Opcodes.ACC_STATIC;
+import static org.objectweb.asm.Opcodes.GETSTATIC;
+import static org.objectweb.asm.Opcodes.INVOKEINTERFACE;
+import static org.objectweb.asm.Opcodes.INVOKESTATIC;
+import static org.objectweb.asm.Opcodes.INVOKEVIRTUAL;
+
+public class InstrumenterImpl implements Instrumenter {
+    /**
+     * Suffix appended to class names, to avoid collisions during testing when no agent is available to replace classes in-place.
+     */
+    private final String classNameSuffix;
+    private final Map instrumentationMethods;
+
+    public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) {
+        this.classNameSuffix = classNameSuffix;
+        this.instrumentationMethods = instrumentationMethods;
+    }
+
+    public ClassFileInfo instrumentClassFile(Class clazz) throws IOException {
+        ClassFileInfo initial = getClassFileInfo(clazz);
+        return new ClassFileInfo(initial.fileName(), instrumentClass(Type.getInternalName(clazz), initial.bytecodes()));
+    }
+
+    public static ClassFileInfo getClassFileInfo(Class clazz) throws IOException {
+        String internalName = Type.getInternalName(clazz);
+        String fileName = "/" + internalName + ".class";
+        byte[] originalBytecodes;
+        try (InputStream classStream = clazz.getResourceAsStream(fileName)) {
+            if (classStream == null) {
+                throw new IllegalStateException("Classfile not found in jar: " + fileName);
+            }
+            originalBytecodes = classStream.readAllBytes();
+        }
+        return new ClassFileInfo(fileName, originalBytecodes);
+    }
+
+    @Override
+    public byte[] instrumentClass(String className, byte[] classfileBuffer) {
+        ClassReader reader = new ClassReader(classfileBuffer);
+        ClassWriter writer = new ClassWriter(reader, COMPUTE_FRAMES | COMPUTE_MAXS);
+        ClassVisitor visitor = new EntitlementClassVisitor(Opcodes.ASM9, writer, className);
+        reader.accept(visitor, 0);
+        return writer.toByteArray();
+    }
+
+    class EntitlementClassVisitor extends ClassVisitor {
+        final String className;
+
+        EntitlementClassVisitor(int api, ClassVisitor classVisitor, String className) {
+            super(api, classVisitor);
+            this.className = className;
+        }
+
+        @Override
+        public void visit(int version, int access, String name, String signature, String superName, String[] interfaces) {
+            super.visit(version, access, name + classNameSuffix, signature, superName, interfaces);
+        }
+
+        @Override
+        public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
+            var mv = super.visitMethod(access, name, descriptor, signature, exceptions);
+            boolean isStatic = (access & ACC_STATIC) != 0;
+            var key = new MethodKey(
+                className,
+                name,
+                Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(),
+                isStatic
+            );
+            var instrumentationMethod = instrumentationMethods.get(key);
+            if (instrumentationMethod != null) {
+                // LOGGER.debug("Will instrument method {}", key);
+                return new EntitlementMethodVisitor(Opcodes.ASM9, mv, isStatic, descriptor, instrumentationMethod);
+            } else {
+                // LOGGER.trace("Will not instrument method {}", key);
+            }
+            return mv;
+        }
+    }
+
+    static class EntitlementMethodVisitor extends MethodVisitor {
+        private final boolean instrumentedMethodIsStatic;
+        private final String instrumentedMethodDescriptor;
+        private final Method instrumentationMethod;
+        private boolean hasCallerSensitiveAnnotation = false;
+
+        EntitlementMethodVisitor(
+            int api,
+            MethodVisitor methodVisitor,
+            boolean instrumentedMethodIsStatic,
+            String instrumentedMethodDescriptor,
+            Method instrumentationMethod
+        ) {
+            super(api, methodVisitor);
+            this.instrumentedMethodIsStatic = instrumentedMethodIsStatic;
+            this.instrumentedMethodDescriptor = instrumentedMethodDescriptor;
+            this.instrumentationMethod = instrumentationMethod;
+        }
+
+        @Override
+        public AnnotationVisitor visitAnnotation(String descriptor, boolean visible) {
+            if (visible && descriptor.endsWith("CallerSensitive;")) {
+                hasCallerSensitiveAnnotation = true;
+            }
+            return super.visitAnnotation(descriptor, visible);
+        }
+
+        @Override
+        public void visitCode() {
+            pushEntitlementChecksObject();
+            pushCallerClass();
+            forwardIncomingArguments();
+            invokeInstrumentationMethod();
+            super.visitCode();
+        }
+
+        private void pushEntitlementChecksObject() {
+            mv.visitMethodInsn(
+                INVOKESTATIC,
+                "org/elasticsearch/entitlement/api/EntitlementProvider",
+                "checks",
+                "()Lorg/elasticsearch/entitlement/api/EntitlementChecks;",
+                false
+            );
+        }
+
+        private void pushCallerClass() {
+            if (hasCallerSensitiveAnnotation) {
+                mv.visitMethodInsn(
+                    INVOKESTATIC,
+                    "jdk/internal/reflect/Reflection",
+                    "getCallerClass",
+                    Type.getMethodDescriptor(Type.getType(Class.class)),
+                    false
+                );
+            } else {
+                mv.visitFieldInsn(
+                    GETSTATIC,
+                    Type.getInternalName(StackWalker.Option.class),
+                    "RETAIN_CLASS_REFERENCE",
+                    Type.getDescriptor(StackWalker.Option.class)
+                );
+                mv.visitMethodInsn(
+                    INVOKESTATIC,
+                    Type.getInternalName(StackWalker.class),
+                    "getInstance",
+                    Type.getMethodDescriptor(Type.getType(StackWalker.class), Type.getType(StackWalker.Option.class)),
+                    false
+                );
+                mv.visitMethodInsn(
+                    INVOKEVIRTUAL,
+                    Type.getInternalName(StackWalker.class),
+                    "getCallerClass",
+                    Type.getMethodDescriptor(Type.getType(Class.class)),
+                    false
+                );
+            }
+        }
+
+        private void forwardIncomingArguments() {
+            int localVarIndex = 0;
+            if (instrumentedMethodIsStatic == false) {
+                mv.visitVarInsn(Opcodes.ALOAD, localVarIndex++);
+            }
+            for (Type type : Type.getArgumentTypes(instrumentedMethodDescriptor)) {
+                mv.visitVarInsn(type.getOpcode(Opcodes.ILOAD), localVarIndex);
+                localVarIndex += type.getSize();
+            }
+
+        }
+
+        private void invokeInstrumentationMethod() {
+            mv.visitMethodInsn(
+                INVOKEINTERFACE,
+                Type.getInternalName(instrumentationMethod.getDeclaringClass()),
+                instrumentationMethod.getName(),
+                Type.getMethodDescriptor(instrumentationMethod),
+                true
+            );
+        }
+    }
+
+    // private static final Logger LOGGER = LogManager.getLogger(Instrumenter.class);
+
+    public record ClassFileInfo(String fileName, byte[] bytecodes) {}
+}
diff --git a/distribution/tools/entitlement-agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService b/distribution/tools/entitlement-agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService
new file mode 100644
index 000000000000..da2e1bc1e67b
--- /dev/null
+++ b/distribution/tools/entitlement-agent/impl/src/main/resources/META-INF/services/org.elasticsearch.entitlement.instrumentation.InstrumentationService
@@ -0,0 +1,10 @@
+#
+ # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ # or more contributor license agreements. Licensed under the "Elastic License
+ # 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ # Public License v 1"; you may not use this file except in compliance with, at
+ # your election, the "Elastic License 2.0", the "GNU Affero General Public
+ # License v3.0 only", or the "Server Side Public License, v 1".
+#
+
+org.elasticsearch.entitlement.instrumentation.impl.InstrumentationServiceImpl
diff --git a/distribution/tools/entitlement-agent/src/main/java/module-info.java b/distribution/tools/entitlement-agent/src/main/java/module-info.java
index df6fc154fc67..0eb87aeee3f6 100644
--- a/distribution/tools/entitlement-agent/src/main/java/module-info.java
+++ b/distribution/tools/entitlement-agent/src/main/java/module-info.java
@@ -7,7 +7,13 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
+import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+
 module org.elasticsearch.entitlement.agent {
     requires java.instrument;
-    requires org.elasticsearch.entitlement.runtime;
+    requires org.elasticsearch.base; // for @SuppressForbidden
+
+    exports org.elasticsearch.entitlement.instrumentation to org.elasticsearch.entitlement.agent.impl;
+
+    uses InstrumentationService;
 }
diff --git a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java
index b843e42f4a03..acb11af97bb5 100644
--- a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java
+++ b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/EntitlementAgent.java
@@ -9,13 +9,53 @@
 
 package org.elasticsearch.entitlement.agent;
 
-import org.elasticsearch.entitlement.runtime.api.EntitlementChecks;
+import org.elasticsearch.core.SuppressForbidden;
+import org.elasticsearch.core.internal.provider.ProviderLocator;
+import org.elasticsearch.entitlement.instrumentation.InstrumentationService;
+import org.elasticsearch.entitlement.instrumentation.MethodKey;
 
+import java.io.IOException;
 import java.lang.instrument.Instrumentation;
+import java.lang.reflect.Method;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.JarFile;
 
 public class EntitlementAgent {
 
     public static void premain(String agentArgs, Instrumentation inst) throws Exception {
-        EntitlementChecks.setAgentBooted();
+        // Add the bridge library (the one with the entitlement checking interface) to the bootstrap classpath.
+        // We can't actually reference the classes here for real before this point because they won't resolve.
+        var bridgeJarName = System.getProperty("es.entitlements.bridgeJar");
+        if (bridgeJarName == null) {
+            throw new IllegalArgumentException("System property es.entitlements.bridgeJar is required");
+        }
+        addJarToBootstrapClassLoader(inst, bridgeJarName);
+
+        Method targetMethod = System.class.getMethod("exit", int.class);
+        Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.api.EntitlementChecks")
+            .getMethod("checkSystemExit", Class.class, int.class);
+        Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod);
+
+        inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true);
+        inst.retransformClasses(System.class);
     }
+
+    @SuppressForbidden(reason = "The appendToBootstrapClassLoaderSearch method takes a JarFile")
+    private static void addJarToBootstrapClassLoader(Instrumentation inst, String jarString) throws IOException {
+        inst.appendToBootstrapClassLoaderSearch(new JarFile(jarString));
+    }
+
+    private static String internalName(Class c) {
+        return c.getName().replace('.', '/');
+    }
+
+    private static final InstrumentationService INSTRUMENTER_FACTORY = (new ProviderLocator<>(
+        "entitlement-agent",
+        InstrumentationService.class,
+        "org.elasticsearch.entitlement.agent.impl",
+        Set.of("org.objectweb.nonexistent.asm")
+    )).get();
+
+    // private static final Logger LOGGER = LogManager.getLogger(EntitlementAgent.class);
 }
diff --git a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java
new file mode 100644
index 000000000000..bd9bb5bf2e5c
--- /dev/null
+++ b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/agent/Transformer.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.agent;
+
+import org.elasticsearch.entitlement.instrumentation.Instrumenter;
+
+import java.lang.instrument.ClassFileTransformer;
+import java.security.ProtectionDomain;
+import java.util.Set;
+
+/**
+ * A {@link ClassFileTransformer} that applies an {@link Instrumenter} to the appropriate classes.
+ */
+public class Transformer implements ClassFileTransformer {
+    private final Instrumenter instrumenter;
+    private final Set classesToTransform;
+
+    public Transformer(Instrumenter instrumenter, Set classesToTransform) {
+        this.instrumenter = instrumenter;
+        this.classesToTransform = classesToTransform;
+        // TODO: Should warn if any MethodKey doesn't match any methods
+    }
+
+    @Override
+    public byte[] transform(
+        ClassLoader loader,
+        String className,
+        Class classBeingRedefined,
+        ProtectionDomain protectionDomain,
+        byte[] classfileBuffer
+    ) {
+        if (classesToTransform.contains(className)) {
+            // System.out.println("Transforming " + className);
+            return instrumenter.instrumentClass(className, classfileBuffer);
+        } else {
+            // System.out.println("Not transforming " + className);
+            return classfileBuffer;
+        }
+    }
+
+    // private static final Logger LOGGER = LogManager.getLogger(Transformer.class);
+}
diff --git a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
new file mode 100644
index 000000000000..25fa84ec7c4b
--- /dev/null
+++ b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation;
+
+import java.lang.reflect.Method;
+import java.util.Map;
+
+/**
+ * The SPI service entry point for instrumentation.
+ */
+public interface InstrumentationService {
+    Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods);
+
+    /**
+     * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline
+     */
+    MethodKey methodKeyForTarget(Method targetMethod);
+}
diff --git a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java
new file mode 100644
index 000000000000..9f39cbbbd0df
--- /dev/null
+++ b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/Instrumenter.java
@@ -0,0 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation;
+
+public interface Instrumenter {
+    byte[] instrumentClass(String className, byte[] classfileBuffer);
+}
diff --git a/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
new file mode 100644
index 000000000000..54e09c10bcc5
--- /dev/null
+++ b/distribution/tools/entitlement-agent/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.instrumentation;
+
+import java.util.List;
+
+/**
+ *
+ * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes
+ */
+public record MethodKey(String className, String methodName, List parameterTypes, boolean isStatic) {}
diff --git a/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java b/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java
index 3927465570c9..bb775d302c1d 100644
--- a/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java
+++ b/distribution/tools/entitlement-agent/src/test/java/org/elasticsearch/entitlement/agent/EntitlementAgentTests.java
@@ -9,21 +9,40 @@
 
 package org.elasticsearch.entitlement.agent;
 
-import org.elasticsearch.entitlement.runtime.api.EntitlementChecks;
+import com.carrotsearch.randomizedtesting.annotations.SuppressForbidden;
+
+import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager;
+import org.elasticsearch.entitlement.runtime.api.NotEntitledException;
+import org.elasticsearch.entitlement.runtime.internals.EntitlementInternals;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.ESTestCase.WithoutSecurityManager;
+import org.junit.After;
 
 /**
- * This is an end-to-end test that runs with the javaagent installed.
- * It should exhaustively test every instrumented method to make sure it passes with the entitlement
- * and fails without it.
+ * This is an end-to-end test of the agent and entitlement runtime.
+ * It runs with the agent installed, and exhaustively tests every instrumented method
+ * to make sure it works with the entitlement granted and throws without it.
+ * The only exception is {@link System#exit}, where we can't check that it works without
+ * terminating the JVM.
  * See {@code build.gradle} for how we set the command line arguments for this test.
  */
 @WithoutSecurityManager
 public class EntitlementAgentTests extends ESTestCase {
 
-    public void testAgentBooted() {
-        assertTrue(EntitlementChecks.isAgentBooted());
+    public static final ElasticsearchEntitlementManager ENTITLEMENT_MANAGER = ElasticsearchEntitlementManager.get();
+
+    @After
+    public void resetEverything() {
+        EntitlementInternals.reset();
+    }
+
+    /**
+     * We can't really check that this one passes because it will just exit the JVM.
+     */
+    @SuppressForbidden("Specifically testing System.exit")
+    public void testSystemExitNotEntitled() {
+        ENTITLEMENT_MANAGER.activate();
+        assertThrows(NotEntitledException.class, () -> System.exit(123));
     }
 
 }
diff --git a/distribution/tools/entitlement-bridge/README.md b/distribution/tools/entitlement-bridge/README.md
new file mode 100644
index 000000000000..7204d1d2c98c
--- /dev/null
+++ b/distribution/tools/entitlement-bridge/README.md
@@ -0,0 +1,11 @@
+### Entitlement Bridge
+
+This is the code called directly from instrumented methods.
+It's a minimal code stub that is loaded into the boot classloader by the entitlement agent
+so that it is callable from the class library methods instrumented by the agent.
+Its job is to forward the entitlement checks to the actual runtime library,
+which is loaded normally.
+
+It is not responsible for injecting the bytecode instrumentation (that's the agent)
+nor for implementing the permission checks (that's the runtime library).
+
diff --git a/distribution/tools/entitlement-bridge/build.gradle b/distribution/tools/entitlement-bridge/build.gradle
new file mode 100644
index 000000000000..29969a862964
--- /dev/null
+++ b/distribution/tools/entitlement-bridge/build.gradle
@@ -0,0 +1,18 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+apply plugin: 'elasticsearch.build'
+
+dependencies {
+}
+
+tasks.named('forbiddenApisMain').configure {
+  replaceSignatureFiles 'jdk-signatures'
+}
+
diff --git a/libs/entitlement-runtime/src/main/java/module-info.java b/distribution/tools/entitlement-bridge/src/main/java/module-info.java
similarity index 72%
rename from libs/entitlement-runtime/src/main/java/module-info.java
rename to distribution/tools/entitlement-bridge/src/main/java/module-info.java
index 13849f0658d7..7091ae34ce1e 100644
--- a/libs/entitlement-runtime/src/main/java/module-info.java
+++ b/distribution/tools/entitlement-bridge/src/main/java/module-info.java
@@ -7,8 +7,8 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
 
-module org.elasticsearch.entitlement.runtime {
-    requires org.elasticsearch.base;
+module org.elasticsearch.entitlement.bridge {
+    uses org.elasticsearch.entitlement.api.EntitlementChecks;
 
-    exports org.elasticsearch.entitlement.runtime.api to org.elasticsearch.entitlement.agent;
+    exports org.elasticsearch.entitlement.api;
 }
diff --git a/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java b/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java
new file mode 100644
index 000000000000..b45313eb018a
--- /dev/null
+++ b/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementChecks.java
@@ -0,0 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.api;
+
+public interface EntitlementChecks {
+    void checkSystemExit(Class<?> callerClass, int status);
+}
diff --git a/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java b/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java
new file mode 100644
index 000000000000..bc10adcd086e
--- /dev/null
+++ b/distribution/tools/entitlement-bridge/src/main/java/org/elasticsearch/entitlement/api/EntitlementProvider.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.api;
+
+import java.util.List;
+import java.util.ServiceLoader;
+
+public class EntitlementProvider {
+    private static final EntitlementChecks CHECKS = lookupEntitlementChecksImplementation();
+
+    public static EntitlementChecks checks() {
+        return CHECKS;
+    }
+
+    private static EntitlementChecks lookupEntitlementChecksImplementation() {
+        List<EntitlementChecks> candidates = ServiceLoader.load(EntitlementChecks.class).stream().map(ServiceLoader.Provider::get).toList();
+        if (candidates.isEmpty()) {
+            throw new IllegalStateException("No EntitlementChecks service");
+        } else if (candidates.size() >= 2) {
+            throw new IllegalStateException(
+                "Multiple EntitlementChecks services: " + candidates.stream().map(e -> e.getClass().getSimpleName()).toList()
+            );
+        } else {
+            return candidates.get(0);
+        }
+    }
+}
diff --git a/libs/entitlement-runtime/README.md b/distribution/tools/entitlement-runtime/README.md
similarity index 51%
rename from libs/entitlement-runtime/README.md
rename to distribution/tools/entitlement-runtime/README.md
index 49cbc873c9de..3e064705c3ae 100644
--- a/libs/entitlement-runtime/README.md
+++ b/distribution/tools/entitlement-runtime/README.md
@@ -5,10 +5,3 @@ This module implements mechanisms to grant and check permissions under the _enti
 The entitlements system provides an alternative to the legacy `SecurityManager` system, which is deprecated for removal.
 The `entitlement-agent` tool instruments sensitive class library methods with calls to this module, in order to enforce the controls.
 
-This module is responsible for:
-- Defining which class library methods are sensitive
-- Defining what permissions should be checked for each sensitive method
-- Implementing the permission checks
-- Offering a "grant" API to grant permissions
-
-It is not responsible for anything to do with bytecode instrumentation; that responsibility lies with `entitlement-agent`.
diff --git a/libs/entitlement-runtime/build.gradle b/distribution/tools/entitlement-runtime/build.gradle
similarity index 76%
rename from libs/entitlement-runtime/build.gradle
rename to distribution/tools/entitlement-runtime/build.gradle
index a552dd7d5ba4..0fb7bdec883f 100644
--- a/libs/entitlement-runtime/build.gradle
+++ b/distribution/tools/entitlement-runtime/build.gradle
@@ -10,7 +10,9 @@ apply plugin: 'elasticsearch.build'
 apply plugin: 'elasticsearch.publish'
 
 dependencies {
-  compileOnly project(':libs:elasticsearch-core')
+  compileOnly project(':libs:elasticsearch-core') // For @SuppressForbidden
+  compileOnly project(':server') // To access the main server module for special permission checks
+  compileOnly project(':distribution:tools:entitlement-bridge')
 
   testImplementation project(":test:framework")
 }
diff --git a/distribution/tools/entitlement-runtime/src/main/java/module-info.java b/distribution/tools/entitlement-runtime/src/main/java/module-info.java
new file mode 100644
index 000000000000..d0bfc804f802
--- /dev/null
+++ b/distribution/tools/entitlement-runtime/src/main/java/module-info.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+module org.elasticsearch.entitlement.runtime {
+    requires org.elasticsearch.entitlement.bridge;
+    requires org.elasticsearch.server;
+
+    exports org.elasticsearch.entitlement.runtime.api;
+
+    provides org.elasticsearch.entitlement.api.EntitlementChecks
+        with
+            org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager;
+}
diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java
new file mode 100644
index 000000000000..a80d412f5dbd
--- /dev/null
+++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementManager.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.runtime.api;
+
+import org.elasticsearch.entitlement.api.EntitlementChecks;
+import org.elasticsearch.entitlement.api.EntitlementProvider;
+
+import java.util.Optional;
+
+import static org.elasticsearch.entitlement.runtime.internals.EntitlementInternals.isActive;
+
+/**
+ * Implementation of the {@link EntitlementChecks} interface, providing additional
+ * API methods for managing the checks.
+ * The bridge module loads this object via SPI.
+ */
+public class ElasticsearchEntitlementManager implements EntitlementChecks {
+    /**
+     * @return the same instance of {@link ElasticsearchEntitlementManager} returned by {@link EntitlementProvider}.
+     */
+    public static ElasticsearchEntitlementManager get() {
+        return (ElasticsearchEntitlementManager) EntitlementProvider.checks();
+    }
+
+    /**
+     * Causes entitlements to be enforced.
+     */
+    public void activate() {
+        isActive = true;
+    }
+
+    @Override
+    public void checkSystemExit(Class<?> callerClass, int status) {
+        var requestingModule = requestingModule(callerClass);
+        if (isTriviallyAllowed(requestingModule)) {
+            // System.out.println(" - Trivially allowed");
+            return;
+        }
+        // Hard-forbidden until we develop the permission granting scheme
+        throw new NotEntitledException("Missing entitlement for " + requestingModule);
+    }
+
+    private static Module requestingModule(Class<?> callerClass) {
+        if (callerClass != null) {
+            Module callerModule = callerClass.getModule();
+            if (callerModule.getLayer() != ModuleLayer.boot()) {
+                // fast path
+                return callerModule;
+            }
+        }
+        int framesToSkip = 1  // requestingModule (this method)
+            + 1  // the checkXxx method
+            + 1  // the runtime config method
+            + 1  // the instrumented method
+        ;
+        Optional<Module> module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE)
+            .walk(
+                s -> s.skip(framesToSkip)
+                    .map(f -> f.getDeclaringClass().getModule())
+                    .filter(m -> m.getLayer() != ModuleLayer.boot())
+                    .findFirst()
+            );
+        return module.orElse(null);
+    }
+
+    private static boolean isTriviallyAllowed(Module requestingModule) {
+        return isActive == false || (requestingModule == null) || requestingModule == System.class.getModule();
+    }
+
+}
diff --git a/libs/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/EntitlementChecks.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/NotEntitledException.java
similarity index 69%
rename from libs/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/EntitlementChecks.java
rename to distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/NotEntitledException.java
index c06e1e5b1f85..5afffc84f77a 100644
--- a/libs/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/EntitlementChecks.java
+++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/api/NotEntitledException.java
@@ -9,14 +9,12 @@
 
 package org.elasticsearch.entitlement.runtime.api;
 
-public class EntitlementChecks {
-    static boolean isAgentBooted = false;
-
-    public static void setAgentBooted() {
-        isAgentBooted = true;
+public class NotEntitledException extends RuntimeException {
+    public NotEntitledException(String message) {
+        super(message);
     }
 
-    public static boolean isAgentBooted() {
-        return isAgentBooted;
+    public NotEntitledException(String message, Throwable cause) {
+        super(message, cause);
     }
 }
diff --git a/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/internals/EntitlementInternals.java b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/internals/EntitlementInternals.java
new file mode 100644
index 000000000000..ea83caf198b0
--- /dev/null
+++ b/distribution/tools/entitlement-runtime/src/main/java/org/elasticsearch/entitlement/runtime/internals/EntitlementInternals.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.runtime.internals;
+
+/**
+ * Don't export this from the module. Just don't.
+ */
+public class EntitlementInternals {
+    /**
+     * When false, entitlement rules are not enforced; all operations are allowed.
+     */
+    public static volatile boolean isActive = false;
+
+    public static void reset() {
+        isActive = false;
+    }
+}
diff --git a/distribution/tools/entitlement-runtime/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks b/distribution/tools/entitlement-runtime/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks
new file mode 100644
index 000000000000..5865e43e2b85
--- /dev/null
+++ b/distribution/tools/entitlement-runtime/src/main/resources/META-INF/services/org.elasticsearch.entitlement.api.EntitlementChecks
@@ -0,0 +1,10 @@
+#
+ # Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ # or more contributor license agreements. Licensed under the "Elastic License
+ # 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ # Public License v 1"; you may not use this file except in compliance with, at
+ # your election, the "Elastic License 2.0", the "GNU Affero General Public
+ # License v3.0 only", or the "Server Side Public License, v 1".
+#
+
+org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementManager
diff --git a/libs/core/src/main/java/module-info.java b/libs/core/src/main/java/module-info.java
index 8f671f243b16..3c5d787ab8ba 100644
--- a/libs/core/src/main/java/module-info.java
+++ b/libs/core/src/main/java/module-info.java
@@ -15,7 +15,11 @@ module org.elasticsearch.base {
 
     exports org.elasticsearch.core;
     exports org.elasticsearch.jdk;
-    exports org.elasticsearch.core.internal.provider to org.elasticsearch.xcontent, org.elasticsearch.nativeaccess;
+    exports org.elasticsearch.core.internal.provider
+        to
+            org.elasticsearch.xcontent,
+            org.elasticsearch.nativeaccess,
+            org.elasticsearch.entitlement.agent;
 
     uses ModuleQualifiedExportsService;
 }
diff --git a/settings.gradle b/settings.gradle
index 6767ce4a3e3c..a47751fd499c 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -94,6 +94,9 @@ List projects = [
   'distribution:tools:geoip-cli',
   'distribution:tools:ansi-console',
   'distribution:tools:entitlement-agent',
+  'distribution:tools:entitlement-agent:impl',
+  'distribution:tools:entitlement-bridge',
+  'distribution:tools:entitlement-runtime',
   'server',
   'test:framework',
   'test:fixtures:azure-fixture',

From 683bf429174e4d7b9ffb8f859e1e2e7c8e2798d2 Mon Sep 17 00:00:00 2001
From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com>
Date: Wed, 9 Oct 2024 16:55:12 +0300
Subject: [PATCH 83/85] Re-enable
 ScaledFloatFieldMapperTests.testSyntheticSourceKeepArrays (#114408)

---
 muted-tests.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index 22847e754ab9..ef27eeeffc14 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -377,9 +377,6 @@ tests:
 - class: org.elasticsearch.xpack.inference.services.cohere.CohereServiceTests
   method: testInfer_StreamRequest
   issue: https://github.com/elastic/elasticsearch/issues/114385
-- class: org.elasticsearch.index.mapper.extras.ScaledFloatFieldMapperTests
-  method: testSyntheticSourceKeepArrays
-  issue: https://github.com/elastic/elasticsearch/issues/114406
 
 # Examples:
 #

From 3e06625fd79edc30fb1fb3a49e56d88297d14249 Mon Sep 17 00:00:00 2001
From: Nikolaj Volgushev 
Date: Wed, 9 Oct 2024 16:53:59 +0200
Subject: [PATCH 84/85] Default enable cluster state role mapper (#114337)

This PR default-enables cluster-state role mappings as the first part of the mitigation for a regression in ECK introduced by https://github.com/elastic/elasticsearch/pull/107410.

Prior to this PR, cluster-state role mappings were written to cluster-state, but not read from it.

With this PR, cluster-state role mappings will be read and used to assign roles to users, i.e. in user role resolution.

However, they will not be included in the output of the [Get role mappings API](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html) yet. Exposing them via API is a target for a follow-up fix.

Relates: ES-9628
Supersedes: https://github.com/elastic/elasticsearch/pull/113900
---
 docs/changelog/114337.yaml                               | 5 +++++
 .../integration/RoleMappingFileSettingsIT.java           | 9 ---------
 .../security/authc/jwt/JwtRoleMappingsIntegTests.java    | 2 --
 .../authc/support/mapper/ClusterStateRoleMapper.java     | 8 ++++----
 .../org/elasticsearch/test/SecuritySettingsSource.java   | 2 +-
 .../support/mapper/ClusterStateRoleMapperTests.java      | 8 ++++----
 6 files changed, 14 insertions(+), 20 deletions(-)
 create mode 100644 docs/changelog/114337.yaml

diff --git a/docs/changelog/114337.yaml b/docs/changelog/114337.yaml
new file mode 100644
index 000000000000..ec55be8bb179
--- /dev/null
+++ b/docs/changelog/114337.yaml
@@ -0,0 +1,5 @@
+pr: 114337
+summary: "Enables cluster state role mapper, to include ECK operator-defined role mappings in role resolution"
+area: Authentication
+type: bug
+issues: []
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
index 38dd7116acce..778d88d83288 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java
@@ -148,15 +148,6 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase {
              }
         }""";
 
-    @Override
-    protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
-        Settings.Builder builder = Settings.builder()
-            .put(super.nodeSettings(nodeOrdinal, otherSettings))
-            // some tests make use of cluster-state based role mappings
-            .put("xpack.security.authc.cluster_state_role_mappings.enabled", true);
-        return builder.build();
-    }
-
     @After
     public void cleanUp() {
         updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec"));
diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRoleMappingsIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRoleMappingsIntegTests.java
index 0a4a379e3a06..77fe4a896eed 100644
--- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRoleMappingsIntegTests.java
+++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRoleMappingsIntegTests.java
@@ -78,8 +78,6 @@ public final class JwtRoleMappingsIntegTests extends SecurityIntegTestCase {
     protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
         Settings.Builder builder = Settings.builder()
             .put(super.nodeSettings(nodeOrdinal, otherSettings))
-            // some tests make use of cluster-state based role mappings
-            .put("xpack.security.authc.cluster_state_role_mappings.enabled", true)
             .put(XPackSettings.TOKEN_SERVICE_ENABLED_SETTING.getKey(), randomBoolean())
             // 1st JWT realm
             .put("xpack.security.authc.realms.jwt.jwt0.order", 10)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
index 9a6e9e75c468..5dea6a938263 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapper.java
@@ -31,9 +31,9 @@ import static org.elasticsearch.xpack.core.security.SecurityExtension.SecurityCo
 public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCache implements ClusterStateListener {
 
     /**
-     * This setting is never registered by the xpack security plugin - in order to enable the
+     * This setting is never registered by the xpack security plugin - in order to disable the
      * cluster-state based role mapper another plugin must register it as a boolean setting
-     * and set it to `true`.
+     * and set it to `false`.
      * If this setting is set to true then:
      * 
    *
  • Realms that make use role mappings (all realms but file and native) will, @@ -54,8 +54,8 @@ public final class ClusterStateRoleMapper extends AbstractRoleMapperClearRealmCa public ClusterStateRoleMapper(Settings settings, ScriptService scriptService, ClusterService clusterService) { this.scriptService = scriptService; this.clusterService = clusterService; - // this role mapper is disabled by default and only code in other plugins can enable it - this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, false); + // this role mapper is enabled by default and only code in other plugins can disable it + this.enabled = settings.getAsBoolean(CLUSTER_STATE_ROLE_MAPPINGS_ENABLED, true); if (this.enabled) { clusterService.addListener(this); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 6d7817db8ec0..ce5aaacdb92b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -403,7 +403,7 @@ public class SecuritySettingsSource extends NodeConfigurationSource { ); public static final Setting CLUSTER_STATE_ROLE_MAPPINGS_ENABLED = Setting.boolSetting( "xpack.security.authc.cluster_state_role_mappings.enabled", - false, + true, Setting.Property.NodeScope ); public static final Setting NATIVE_ROLES_ENABLED = Setting.boolSetting( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java index 74221c113327..cc6bc3a839fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/ClusterStateRoleMapperTests.java @@ -56,12 +56,12 @@ public class ClusterStateRoleMapperTests extends ESTestCase { () -> 1L ); clusterService = mock(ClusterService.class); - enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build(); + disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build(); if (randomBoolean()) { - disabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", false).build(); + enabledSettings = Settings.builder().put("xpack.security.authc.cluster_state_role_mappings.enabled", true).build(); } else { - // the cluster state role mapper is disabled by default - disabledSettings = Settings.EMPTY; + // the cluster state role mapper is enabled by default + enabledSettings = Settings.EMPTY; } } From 21e3a17e5b70a26c6703590d10bd4305ca81425b Mon Sep 17 00:00:00 2001 From: Iraklis Psaroudakis Date: Wed, 9 Oct 2024 18:37:24 +0300 Subject: [PATCH 85/85] Ensure green step in synonyms rule yaml test (#114400) Fixes test issue serverless 2922. --- .../rest-api-spec/test/synonyms/60_synonym_rule_get.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml index 79c666e44661..0a4a32448666 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/60_synonym_rule_get.yml @@ -13,7 +13,10 @@ setup: id: "test-id-2" - synonyms: "test => check" id: "test-id-3" - + - do: + cluster.health: + index: .synonyms-2 + wait_for_status: green --- "Get a synonym rule":