diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index c375db26f9bb..d1db48d392d9 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index aff0add62a2b..b00ef51ec8f0 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -18,6 +18,7 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + - almalinux-9 agents: provider: gcp image: family/elasticsearch-{{matrix.image}} diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 9bcd61ac1273..2fbcd075b971 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -19,6 +19,7 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + - almalinux-9 agents: provider: gcp image: family/elasticsearch-{{matrix.image}} @@ -268,8 +269,8 @@ steps: env: BWC_VERSION: 8.14.3 - - label: "{{matrix.image}} / 8.15.6 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.6 + - label: "{{matrix.image}} / 8.15.5 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.15.5 timeout_in_minutes: 300 matrix: setup: @@ -282,7 +283,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.15.6 + BWC_VERSION: 8.15.5 - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index 8bee3a78f831..33e9a040b22c 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -18,6 +18,7 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + - almalinux-9 agents: provider: gcp image: family/elasticsearch-{{matrix.image}} diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 3d6095d0b9e6..94c9020a794a 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -287,8 +287,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.15.6 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.6#bwcTest + - label: 8.15.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.15.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -297,7 +297,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.15.6 + BWC_VERSION: 8.15.5 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.15.6", "8.16.2", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: 
family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 6c7dadfd454e..3b7973d05a33 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -21,6 +21,7 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + - almalinux-9 PACKAGING_TASK: - docker - docker-cloud-ess diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 826091807ce5..79de89145211 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -14,7 +14,7 @@ BWC_VERSION: - "8.12.2" - "8.13.4" - "8.14.3" - - "8.15.6" + - "8.15.5" - "8.16.2" - "8.17.0" - "8.18.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index f92881da7fea..5514fc376a28 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,4 @@ BWC_VERSION: - - "8.15.6" - "8.16.2" - "8.17.0" - "8.18.0" diff --git a/branches.json b/branches.json index 0e23a795664d..95fbdb1efd65 100644 --- a/branches.json +++ b/branches.json @@ -13,9 +13,6 @@ { "branch": "8.x" }, - { - "branch": "8.15" - }, { "branch": "7.17" } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java index 41c0b4d67e1d..ea9009172c7e 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/FormattingPrecommitPlugin.java @@ -17,8 +17,6 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import java.io.File; -import java.util.Arrays; -import java.util.Map; /** * This plugin configures formatting for Java source using Spotless @@ -66,8 +64,7 @@ public class FormattingPrecommitPlugin implements Plugin { java.importOrderFile(new File(elasticsearchWorkspace, importOrderPath)); // Most formatting is done through the Eclipse formatter - java.eclipse().withP2Mirrors(Map.of("https://download.eclipse.org/", "https://mirror.umd.edu/eclipse/")) - .configFile(new File(elasticsearchWorkspace, formatterConfigPath)); + java.eclipse().configFile(new File(elasticsearchWorkspace, formatterConfigPath)); // Ensure blank lines are actually empty. 
Since formatters are applied in // order, apply this one last, otherwise non-empty blank lines can creep diff --git a/build.gradle b/build.gradle index 715614c1beea..b95e34640cb5 100644 --- a/build.gradle +++ b/build.gradle @@ -301,7 +301,10 @@ allprojects { if (project.path.contains(":distribution:docker")) { enabled = false } - + if (project.path.contains(":libs:cli")) { + // ensure we resolve p2 dependencies for the spotless eclipse formatter + dependsOn "spotlessJavaCheck" + } } plugins.withId('lifecycle-base') { diff --git a/docs/changelog/116423.yaml b/docs/changelog/116423.yaml new file mode 100644 index 000000000000..d6d10eab410e --- /dev/null +++ b/docs/changelog/116423.yaml @@ -0,0 +1,5 @@ +pr: 116423 +summary: Support mTLS for the Elastic Inference Service integration inside the inference API +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/117839.yaml b/docs/changelog/117839.yaml new file mode 100644 index 000000000000..98c97b5078c0 --- /dev/null +++ b/docs/changelog/117839.yaml @@ -0,0 +1,5 @@ +pr: 117839 +summary: Add match support for `semantic_text` fields +area: "Search" +type: enhancement +issues: [] diff --git a/docs/changelog/117840.yaml b/docs/changelog/117840.yaml new file mode 100644 index 000000000000..e1f469643af4 --- /dev/null +++ b/docs/changelog/117840.yaml @@ -0,0 +1,5 @@ +pr: 117840 +summary: Fix timeout ingesting an empty string into a `semantic_text` field +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/118035.yaml b/docs/changelog/118035.yaml new file mode 100644 index 000000000000..fdeaa184723b --- /dev/null +++ b/docs/changelog/118035.yaml @@ -0,0 +1,6 @@ +pr: 118035 +summary: Include hidden indices in `DeprecationInfoAction` +area: Indices APIs +type: bug +issues: + - 118020 diff --git a/docs/changelog/118102.yaml b/docs/changelog/118102.yaml new file mode 100644 index 000000000000..e5ec32cdddbe --- /dev/null +++ b/docs/changelog/118102.yaml @@ -0,0 +1,5 @@ +pr: 118102 +summary: "ESQL: Enterprise license enforcement for CCS" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/118173.yaml b/docs/changelog/118173.yaml new file mode 100644 index 000000000000..a3c9054674ba --- /dev/null +++ b/docs/changelog/118173.yaml @@ -0,0 +1,5 @@ +pr: 118173 +summary: ES|QL categorize with multiple groupings +area: Machine Learning +type: feature +issues: [] diff --git a/docs/changelog/118194.yaml b/docs/changelog/118194.yaml new file mode 100644 index 000000000000..0e5eca55d597 --- /dev/null +++ b/docs/changelog/118194.yaml @@ -0,0 +1,5 @@ +pr: 118194 +summary: Retry on `ClusterBlockException` on transform destination index +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/118291.yaml b/docs/changelog/118291.yaml new file mode 100644 index 000000000000..8001b3972e87 --- /dev/null +++ b/docs/changelog/118291.yaml @@ -0,0 +1,5 @@ +pr: 118291 +summary: Adding a migration reindex cancel API +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/118435.yaml b/docs/changelog/118435.yaml new file mode 100644 index 000000000000..8bccbeb54698 --- /dev/null +++ b/docs/changelog/118435.yaml @@ -0,0 +1,6 @@ +pr: 118435 +summary: '`_score` should not be a reserved attribute in ES|QL' +area: ES|QL +type: enhancement +issues: + - 118460 diff --git a/docs/reference/connector/docs/connectors-release-notes.asciidoc b/docs/reference/connector/docs/connectors-release-notes.asciidoc index e1ed082365c0..ff3d859e1a88 100644 --- 
a/docs/reference/connector/docs/connectors-release-notes.asciidoc +++ b/docs/reference/connector/docs/connectors-release-notes.asciidoc @@ -9,8 +9,76 @@ Prior to version *8.16.0*, the connector release notes were published as part of the {enterprise-search-ref}/changelog.html[Enterprise Search documentation]. ==== -*Release notes*: +[discrete] +[[es-connectors-release-notes-8-17-0]] +=== 8.17.0 -* <> +No notable changes in this release. -include::release-notes/connectors-release-notes-8.16.0.asciidoc[] +[discrete] +[[es-connectors-release-notes-8-16-1]] +=== 8.16.1 + +[discrete] +[[es-connectors-release-notes-8-16-1-bug-fixes]] +==== Bug fixes + +* Fixed a bug in the Outlook Connector where deactivated users could cause the sync to fail. +See https://github.com/elastic/connectors/pull/2967[*PR 2967*]. +* Fixed a bug where the Confluence connector was not downloading some blog post documents due to an unexpected response format. +See https://github.com/elastic/connectors/pull/2984[*PR 2984*]. + +[discrete] +[[es-connectors-release-notes-8-16-0]] +=== 8.16.0 + +[discrete] +[[es-connectors-release-notes-deprecation-notice]] +==== Deprecation notices + +* *Direct index access for connectors and sync jobs* ++ +IMPORTANT: Directly accessing connector and sync job state through `.elastic-connectors*` indices is deprecated, and will be disallowed entirely in a future release. + +* Instead, the Elasticsearch Connector APIs should be used. Connectors framework code now uses the <> by default. +See https://github.com/elastic/connectors/pull/2884[*PR 2902*]. + +* *Docker `enterprise-search` namespace deprecation* ++ +IMPORTANT: The `enterprise-search` Docker namespace is deprecated and will be discontinued in a future release. ++ +Starting in `8.16.0`, Docker images are being transitioned to the new `integrations` namespace, which will become the sole location for future releases. This affects the https://github.com/elastic/connectors[Elastic Connectors] and https://github.com/elastic/data-extraction-service[Elastic Data Extraction Service]. ++ +During this transition period, images are published to both namespaces: ++ +** *Example*: ++ +Deprecated namespace:: +`docker.elastic.co/enterprise-search/elastic-connectors:v8.16.0` ++ +New namespace:: +`docker.elastic.co/integrations/elastic-connectors:v8.16.0` ++ +Users should migrate to the new `integrations` namespace as soon as possible to ensure continued access to future releases. + +[discrete] +[[es-connectors-release-notes-8-16-0-enhancements]] +==== Enhancements + +* Docker images now use Chainguard's Wolfi base image (`docker.elastic.co/wolfi/jdk:openjdk-11-dev`), replacing the previous `ubuntu:focal` base. + +* The Sharepoint Online connector now works with the `Sites.Selected` permission instead of the broader `Sites.Read.All` permission. +See https://github.com/elastic/connectors/pull/2762[*PR 2762*]. + +* Starting in 8.16.0, connectors will start using proper SEMVER, with `MAJOR.MINOR.PATCH`, which aligns with Elasticsearch/Kibana versions. This drops the previous `.BUILD` suffix, which we used to release connectors between Elastic stack releases. Going forward, these inter-stack-release releases will be suffixed instead with `+`, aligning with Elastic Agent and conforming to SEMVER. +See https://github.com/elastic/connectors/pull/2749[*PR 2749*]. + +* Connector logs now use UTC timestamps instead of machine-local timestamps. This only impacts logging output. +See https://github.com/elastic/connectors/pull/2695[*PR 2695*].
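For readers updating their deployments, a minimal sketch of pulling the connectors image from the new `integrations` namespace in a Compose file; only the image coordinates come from the release notes above, while the service name, command, and volume layout are illustrative assumptions:

[source,yaml]
----
# Hypothetical docker-compose.yml; everything except the image name is an
# assumed setup, not taken from this changelog.
services:
  elastic-connectors:
    # New namespace (replaces docker.elastic.co/enterprise-search/...):
    image: docker.elastic.co/integrations/elastic-connectors:v8.16.0
    command: /app/bin/elastic-ingest -c /config/config.yml  # assumed entry point
    volumes:
      - ./config:/config  # assumed host directory containing config.yml
----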
+ +[discrete] +[[es-connectors-release-notes-8-16-0-bug-fixes]] +==== Bug fixes + +* The Dropbox connector now fetches the files from team shared folders. +See https://github.com/elastic/connectors/pull/2718[*PR 2718*]. diff --git a/docs/reference/connector/docs/release-notes/connectors-release-notes-8.16.0.asciidoc b/docs/reference/connector/docs/release-notes/connectors-release-notes-8.16.0.asciidoc deleted file mode 100644 index 760833607317..000000000000 --- a/docs/reference/connector/docs/release-notes/connectors-release-notes-8.16.0.asciidoc +++ /dev/null @@ -1,53 +0,0 @@ -[[es-connectors-release-notes-8-16-0]] -=== 8.16.0 connectors release notes - -[discrete] -[[es-connectors-release-notes-deprecation-notice]] -==== Deprecation notices - -* *Direct index access for connectors and sync jobs* -+ -IMPORTANT: Directly accessing connector and sync job state through `.elastic-connectors*` indices is deprecated, and will be disallowed entirely in a future release. - -* Instead, the Elasticsearch Connector APIs should be used. Connectors framework code now uses the <> by default. -See https://github.com/elastic/connectors/pull/2884[*PR 2902*]. - -* *Docker `enterprise-search` namespace deprecation* -+ -IMPORTANT: The `enterprise-search` Docker namespace is deprecated and will be discontinued in a future release. -+ -Starting in `8.16.0`, Docker images are being transitioned to the new `integrations` namespace, which will become the sole location for future releases. This affects the https://github.com/elastic/connectors[Elastic Connectors] and https://github.com/elastic/data-extraction-service[Elastic Data Extraction Service]. -+ -During this transition period, images are published to both namespaces: -+ -** *Example*: -+ -Deprecated namespace:: -`docker.elastic.co/enterprise-search/elastic-connectors:v8.16.0` -+ -New namespace:: -`docker.elastic.co/integrations/elastic-connectors:v8.16.0` -+ -Users should migrate to the new `integrations` namespace as soon as possible to ensure continued access to future releases. - -[discrete] -[[es-connectors-release-notes-8-16-0-enhancements]] -==== Enhancements - -* Docker images now use Chainguard's Wolfi base image (`docker.elastic.co/wolfi/jdk:openjdk-11-dev`), replacing the previous `ubuntu:focal` base. - -* The Sharepoint Online connector now works with the `Sites.Selected` permission instead of the broader permission `Sites.Read.All`. -See https://github.com/elastic/connectors/pull/2762[*PR 2762*]. - -* Starting in 8.16.0, connectors will start using proper SEMVER, with `MAJOR.MINOR.PATCH`, which aligns with Elasticsearch/Kibana versions. This drops the previous `.BUILD` suffix, which we used to release connectors between Elastic stack releases. Going forward, these inter-stack-release releases will be suffixed instead with `+`, aligning with Elastic Agent and conforming to SEMVER. -See https://github.com/elastic/connectors/pull/2749[*PR 2749*]. - -* Connector logs now use UTC timestamps, instead of machine-local timestamps. This only impacts logging output. -See https://github.com/elastic/connectors/pull/2695[*PR 2695*]. - -[discrete] -[[es-connectors-release-notes-8-16-0-bug-fixes]] -==== Bug fixes - -* The Dropbox connector now fetches the files from team shared folders. -See https://github.com/elastic/connectors/pull/2718[*PR 2718*]. 
\ No newline at end of file diff --git a/docs/reference/docs/update.asciidoc b/docs/reference/docs/update.asciidoc index ca6a7e489449..a212c4e152b0 100644 --- a/docs/reference/docs/update.asciidoc +++ b/docs/reference/docs/update.asciidoc @@ -71,7 +71,7 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=refresh] include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=routing] `_source`:: -(Optional, list) Set to `false` to disable source retrieval (default: `true`). +(Optional, list) Set to `true` to enable source retrieval (default: `false`). You can also specify a comma-separated list of the fields you want to retrieve. `_source_excludes`:: diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index d9b8f8802a04..73e2db6e45e3 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -113,7 +113,7 @@ Index mode supports the following values: `standard`::: Standard indexing with default settings. -`tsds`::: _(data streams only)_ Index mode optimized for storage of metrics. For more information, see <>. +`time_series`::: _(data streams only)_ Index mode optimized for storage of metrics. For more information, see <>. `logsdb`::: _(data streams only)_ Index mode optimized for <>. diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index c7b779a994a0..8d5ee1b7d6ba 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -48,21 +48,21 @@ When adaptive allocations are enabled: For more information about adaptive allocations and resources, refer to the {ml-docs}/ml-nlp-auto-scale.html[trained model autoscaling] documentation. -//[discrete] -//[[default-enpoints]] -//=== Default {infer} endpoints +[discrete] +[[default-enpoints]] +=== Default {infer} endpoints -//Your {es} deployment contains some preconfigured {infer} endpoints that makes it easier for you to use them when defining `semantic_text` fields or {infer} processors. -//The following list contains the default {infer} endpoints listed by `inference_id`: +Your {es} deployment contains preconfigured {infer} endpoints, which make them easier to use when defining `semantic_text` fields or using {infer} processors. +The following list contains the default {infer} endpoints listed by `inference_id`: -//* `.elser-2-elasticsearch`: uses the {ml-docs}/ml-nlp-elser.html[ELSER] built-in trained model for `sparse_embedding` tasks (recommended for English language texts) -//* `.multilingual-e5-small-elasticsearch`: uses the {ml-docs}/ml-nlp-e5.html[E5] built-in trained model for `text_embedding` tasks (recommended for non-English language texts) +* `.elser-2-elasticsearch`: uses the {ml-docs}/ml-nlp-elser.html[ELSER] built-in trained model for `sparse_embedding` tasks (recommended for English language texts) +* `.multilingual-e5-small-elasticsearch`: uses the {ml-docs}/ml-nlp-e5.html[E5] built-in trained model for `text_embedding` tasks (recommended for non-English language texts) -//Use the `inference_id` of the endpoint in a <> field definition or when creating an <>. -//The API call will automatically download and deploy the model which might take a couple of minutes. -//Default {infer} enpoints have {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations] enabled. -//For these models, the minimum number of allocations is `0`.
-//If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes. +Use the `inference_id` of the endpoint in a <> field definition or when creating an <>. +The API call will automatically download and deploy the model, which might take a couple of minutes. +Default {infer} endpoints have {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[adaptive allocations] enabled. +For these models, the minimum number of allocations is `0`. +If there is no {infer} activity that uses the endpoint, the number of allocations will scale down to `0` automatically after 15 minutes. [discrete] diff --git a/docs/reference/mapping/params/index-prefixes.asciidoc b/docs/reference/mapping/params/index-prefixes.asciidoc index a143c5531c81..1d5e844467b6 100644 --- a/docs/reference/mapping/params/index-prefixes.asciidoc +++ b/docs/reference/mapping/params/index-prefixes.asciidoc @@ -54,3 +54,30 @@ PUT my-index-000001 } } -------------------------------- + +The `index_prefixes` parameter instructs {ES} to create a `._index_prefix` subfield. This +subfield is used to run fast prefix queries. When highlighting, add the `._index_prefix` +subfield to the `matched_fields` parameter to highlight the main field based on +matches found in the prefix field, as in the request below: + +[source,console] +-------------------------------- +GET my-index-000001/_search +{ + "query": { + "prefix": { + "full_name": { + "value": "ki" + } + } + }, + "highlight": { + "fields": { + "full_name": { + "matched_fields": ["full_name._index_prefix"] + } + } + } +} +-------------------------------- +// TEST[continued] diff --git a/docs/reference/mapping/types/search-as-you-type.asciidoc b/docs/reference/mapping/types/search-as-you-type.asciidoc index 3c71389f4ceb..c2673a614c26 100644 --- a/docs/reference/mapping/types/search-as-you-type.asciidoc +++ b/docs/reference/mapping/types/search-as-you-type.asciidoc @@ -97,11 +97,21 @@ GET my-index-000001/_search "my_field._3gram" ] } + }, + "highlight": { + "fields": { + "my_field": { + "matched_fields": ["my_field._index_prefix"] <1> + } + } } } -------------------------------------------------- // TEST[continued] +<1> Adding `my_field._index_prefix` to `matched_fields` also allows `my_field` to be + highlighted based on matches from the `my_field._index_prefix` field.
+ [source,console-result] -------------------------------------------------- { @@ -126,6 +136,11 @@ GET my-index-000001/_search "_score" : 0.8630463, "_source" : { "my_field" : "quick brown fox jump lazy dog" + }, + "highlight": { + "my_field": [ + "quick brown fox jump lazy dog" + ] } } ] diff --git a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc index 5859ccd03e51..a68f20fb1c65 100644 --- a/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc +++ b/docs/reference/search/search-your-data/ccs-version-compat-matrix.asciidoc @@ -1,25 +1,26 @@ |==== -| 20+^h| Remote cluster version +| 21+^h| Remote cluster version h| Local cluster version - | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 | 8.6 | 8.7 | 8.8 | 8.9 | 8.10 | 8.11 | 8.12 | 8.13 | 8.14 | 8.15 | 8.16 -| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} -| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.14 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.15 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} -| 8.16 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} + | 6.8 | 7.1–7.16 | 7.17 | 8.0 | 8.1 | 8.2 | 8.3 | 8.4 | 8.5 | 8.6 | 8.7 | 8.8 | 8.9 | 8.10 | 8.11 | 8.12 | 8.13 | 8.14 | 8.15 | 8.16 | 8.17 +| 6.8 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.1–7.16 | {yes-icon} | {yes-icon} | {yes-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} +| 7.17 | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.0 | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| 
{yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.1 | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.2 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.3 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.4 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.5 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.6 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon}| {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.7 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.8 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.9 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.10 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.11 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.12 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.13 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | 
{no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.14 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.15 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.16 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} | {yes-icon} +| 8.17 | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {no-icon} | {yes-icon} | {yes-icon} |==== diff --git a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc index 0abc44c809d0..c2fcb88380f5 100644 --- a/docs/reference/search/search-your-data/semantic-search-inference.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search-inference.asciidoc @@ -45,7 +45,7 @@ include::{es-ref-dir}/tab-widgets/inference-api/infer-api-task-widget.asciidoc[] ==== Create the index mapping The mapping of the destination index - the index that contains the embeddings that the model will create based on your input text - must be created. -The destination index must have a field with the <> field type for most models and the <> field type for the sparse vector models like in the case of the `elser` service to index the output of the used model. +The destination index must have a field with the <> field type for most models, or the <> field type for sparse vector models such as those of the `elasticsearch` service, to index the output of the used model. include::{es-ref-dir}/tab-widgets/inference-api/infer-api-mapping-widget.asciidoc[] diff --git a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc index eeecb4718658..9e935f79aa0a 100644 --- a/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc +++ b/docs/reference/tab-widgets/inference-api/infer-api-requirements.asciidoc @@ -8,7 +8,7 @@ the Cohere service. // tag::elser[] ELSER is a model trained by Elastic. If you have an {es} deployment, there is no -further requirement for using the {infer} API with the `elser` service. +further requirement for using the {infer} API with the `elasticsearch` service.
// end::elser[] diff --git a/libs/entitlement/qa/build.gradle b/libs/entitlement/qa/build.gradle index 86bafc34f4d0..7f46b2fe20a8 100644 --- a/libs/entitlement/qa/build.gradle +++ b/libs/entitlement/qa/build.gradle @@ -13,8 +13,8 @@ apply plugin: 'elasticsearch.internal-test-artifact' dependencies { javaRestTestImplementation project(':libs:entitlement:qa:common') - clusterPlugins project(':libs:entitlement:qa:entitlement-allowed') - clusterPlugins project(':libs:entitlement:qa:entitlement-allowed-nonmodular') + clusterModules project(':libs:entitlement:qa:entitlement-allowed') + clusterModules project(':libs:entitlement:qa:entitlement-allowed-nonmodular') clusterPlugins project(':libs:entitlement:qa:entitlement-denied') clusterPlugins project(':libs:entitlement:qa:entitlement-denied-nonmodular') } diff --git a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java index 5135fff44531..2fd4472f5cc6 100644 --- a/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java +++ b/libs/entitlement/qa/src/javaRestTest/java/org/elasticsearch/entitlement/qa/EntitlementsAllowedIT.java @@ -28,8 +28,8 @@ public class EntitlementsAllowedIT extends ESRestTestCase { @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() - .plugin("entitlement-allowed") - .plugin("entitlement-allowed-nonmodular") + .module("entitlement-allowed") + .module("entitlement-allowed-nonmodular") .systemProperty("es.entitlements.enabled", "true") .setting("xpack.security.enabled", "false") .build(); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 01b8f4d574f9..2abfb11964a9 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -15,7 +15,6 @@ import com.sun.tools.attach.AttachNotSupportedException; import com.sun.tools.attach.VirtualMachine; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -29,7 +28,9 @@ import java.util.function.Function; public class EntitlementBootstrap { - public record BootstrapArgs(Collection<Tuple<Path, Boolean>> pluginData, Function<Class<?>, String> pluginResolver) {} + public record PluginData(Path pluginPath, boolean isModular, boolean isExternalPlugin) {} + + public record BootstrapArgs(Collection<PluginData> pluginData, Function<Class<?>, String> pluginResolver) {} private static BootstrapArgs bootstrapArgs; @@ -40,11 +41,11 @@ public class EntitlementBootstrap { /** * Activates entitlement checking. Once this method returns, calls to methods protected by Entitlements from classes without a valid * policy will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. - * @param pluginData a collection of (plugin path, boolean), that holds the paths of all the installed Elasticsearch modules and - * plugins, and whether they are Java modular or not.
+ * @param pluginData a collection of (plugin path, boolean, boolean) that holds the paths of all the installed Elasticsearch modules + * and plugins, whether they are Java modular or not, and whether they are Elasticsearch modules or external plugins. * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). */ - public static void bootstrap(Collection<Tuple<Path, Boolean>> pluginData, Function<Class<?>, String> pluginResolver) { + public static void bootstrap(Collection<PluginData> pluginData, Function<Class<?>, String> pluginResolver) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { throw new IllegalStateException("plugin data is already set"); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index fb694308466c..2956efa8eec3 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,7 +9,6 @@ package org.elasticsearch.entitlement.initialization; -import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.provider.ProviderLocator; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; @@ -96,25 +95,25 @@ public class EntitlementInitialization { return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); } - private static Map<String, Policy> createPluginPolicies(Collection<Tuple<Path, Boolean>> pluginData) throws IOException { + private static Map<String, Policy> createPluginPolicies(Collection<PluginData> pluginData) throws IOException { Map<String, Policy> pluginPolicies = new HashMap<>(pluginData.size()); - for (Tuple<Path, Boolean> entry : pluginData) { - Path pluginRoot = entry.v1(); - boolean isModular = entry.v2(); - + for (var entry : pluginData) { + Path pluginRoot = entry.pluginPath(); String pluginName = pluginRoot.getFileName().toString(); - final Policy policy = loadPluginPolicy(pluginRoot, isModular, pluginName); + + final Policy policy = loadPluginPolicy(pluginRoot, entry.isModular(), pluginName, entry.isExternalPlugin()); pluginPolicies.put(pluginName, policy); } return pluginPolicies; } - private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName) throws IOException { + private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName, boolean isExternalPlugin) + throws IOException { Path policyFile = pluginRoot.resolve(POLICY_FILE_NAME); final Set<String> moduleNames = getModuleNames(pluginRoot, isModular); - final Policy policy = parsePolicyIfExists(pluginName, policyFile); + final Policy policy = parsePolicyIfExists(pluginName, policyFile, isExternalPlugin); // TODO: should this check actually be part of the parser?
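For context, a minimal sketch of the YAML policy file that `parsePolicyIfExists` reads from a plugin root, modeled on the `test-policy.yaml` fixtures exercised by the parser tests later in this change; the scope name and path are illustrative:

[source,yaml]
----
# entitlement-policy.yaml (illustrative scope name and path)
entitlement-module-name:
  - file:
      path: test/path/to/file
      actions:
        - read
        - write
  # `create_class_loader` is esModulesOnly, so declaring it here would make
  # the parser throw a PolicyParserException when isExternalPlugin is true.
----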
for (Scope scope : policy.scopes) { @@ -125,9 +124,9 @@ public class EntitlementInitialization { return policy; } - private static Policy parsePolicyIfExists(String pluginName, Path policyFile) throws IOException { + private static Policy parsePolicyIfExists(String pluginName, Path policyFile, boolean isExternalPlugin) throws IOException { if (Files.exists(policyFile)) { - return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName).parsePolicy(); + return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName, isExternalPlugin).parsePolicy(); } return new Policy(pluginName, List.of()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java index bb1205696b49..768babdb840f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/ExternalEntitlement.java @@ -33,4 +33,12 @@ public @interface ExternalEntitlement { * have to match the parameter names of the constructor. */ String[] parameterNames() default {}; + + /** + * This flag indicates if this Entitlement can be used in external plugins, + * or if it can be used only in Elasticsearch modules ("internal" plugins). + * Using an entitlement that is {@code esModulesOnly} in an external + * plugin policy will throw an exception while parsing. + */ + boolean esModulesOnly() default true; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java index d0837bc09618..4fdbcc93ea6e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileEntitlement.java @@ -26,7 +26,7 @@ public class FileEntitlement implements Entitlement { private final String path; private final int actions; - @ExternalEntitlement(parameterNames = { "path", "actions" }) + @ExternalEntitlement(parameterNames = { "path", "actions" }, esModulesOnly = false) public FileEntitlement(String path, List<String> actionsList) { this.path = path; int actionsInt = 0; diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index a77c86d5ffd0..8d3efe4eb98e 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -18,7 +18,6 @@ import org.elasticsearch.logging.Logger; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.IdentityHashMap; import java.util.List; @@ -56,8 +55,8 @@ public class PolicyManager { final Map<Module, ModuleEntitlements> moduleEntitlementsMap = new HashMap<>(); - protected final Policy serverPolicy; - protected final Map<String, Policy> pluginPolicies; + protected final Map<String, List<Entitlement>> serverEntitlements; + protected final Map<String, Map<String, List<Entitlement>>> pluginsEntitlements; private final Function<Class<?>, String> pluginResolver; public static final String ALL_UNNAMED = "ALL-UNNAMED"; @@
-79,19 +78,16 @@ public class PolicyManager { } public PolicyManager(Policy defaultPolicy, Map<String, Policy> pluginPolicies, Function<Class<?>, String> pluginResolver) { - this.serverPolicy = Objects.requireNonNull(defaultPolicy); - this.pluginPolicies = Collections.unmodifiableMap(Objects.requireNonNull(pluginPolicies)); + this.serverEntitlements = buildScopeEntitlementsMap(Objects.requireNonNull(defaultPolicy)); + this.pluginsEntitlements = Objects.requireNonNull(pluginPolicies) + .entrySet() + .stream() + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> buildScopeEntitlementsMap(e.getValue()))); this.pluginResolver = pluginResolver; } - private static List<Entitlement> lookupEntitlementsForModule(Policy policy, String moduleName) { - for (int i = 0; i < policy.scopes.size(); ++i) { - var scope = policy.scopes.get(i); - if (scope.name.equals(moduleName)) { - return scope.entitlements; - } - } - return null; + private static Map<String, List<Entitlement>> buildScopeEntitlementsMap(Policy policy) { + return policy.scopes.stream().collect(Collectors.toUnmodifiableMap(scope -> scope.name, scope -> scope.entitlements)); } public void checkExitVM(Class<?> callerClass) { @@ -141,21 +137,21 @@ public class PolicyManager { if (isServerModule(requestingModule)) { var scopeName = requestingModule.getName(); - return getModuleEntitlementsOrThrow(callerClass, requestingModule, serverPolicy, scopeName); + return getModuleEntitlementsOrThrow(callerClass, requestingModule, serverEntitlements, scopeName); } // plugins var pluginName = pluginResolver.apply(callerClass); if (pluginName != null) { - var pluginPolicy = pluginPolicies.get(pluginName); - if (pluginPolicy != null) { + var pluginEntitlements = pluginsEntitlements.get(pluginName); + if (pluginEntitlements != null) { final String scopeName; if (requestingModule.isNamed() == false) { scopeName = ALL_UNNAMED; } else { scopeName = requestingModule.getName(); } - return getModuleEntitlementsOrThrow(callerClass, requestingModule, pluginPolicy, scopeName); + return getModuleEntitlementsOrThrow(callerClass, requestingModule, pluginEntitlements, scopeName); } } @@ -167,15 +163,20 @@ public class PolicyManager { return Strings.format("Missing entitlement policy: caller [%s], module [%s]", callerClass, requestingModule.getName()); } - private ModuleEntitlements getModuleEntitlementsOrThrow(Class<?> callerClass, Module module, Policy policy, String moduleName) { - var entitlements = lookupEntitlementsForModule(policy, moduleName); + private ModuleEntitlements getModuleEntitlementsOrThrow( + Class<?> callerClass, + Module module, + Map<String, List<Entitlement>> scopeEntitlements, + String moduleName + ) { + var entitlements = scopeEntitlements.get(moduleName); if (entitlements == null) { // Module without entitlements - remember we don't have any moduleEntitlementsMap.put(module, ModuleEntitlements.NONE); throw new NotEntitledException(buildModuleNoPolicyMessage(callerClass, module)); } // We have a policy for this module - var classEntitlements = createClassEntitlements(entitlements); + var classEntitlements = new ModuleEntitlements(entitlements); moduleEntitlementsMap.put(module, classEntitlements); return classEntitlements; } @@ -184,10 +185,6 @@ public class PolicyManager { return requestingModule.isNamed() && requestingModule.getLayer() == ModuleLayer.boot(); } - private ModuleEntitlements createClassEntitlements(List<Entitlement> entitlements) { - return new ModuleEntitlements(entitlements); - } - private static Module requestingModule(Class<?> callerClass) { if (callerClass != null) { Module callerModule = callerClass.getModule(); @@ -222,6 +219,6
@@ public class PolicyManager { @Override public String toString() { - return "PolicyManager{" + "serverPolicy=" + serverPolicy + ", pluginPolicies=" + pluginPolicies + '}'; + return "PolicyManager{" + "serverEntitlements=" + serverEntitlements + ", pluginsEntitlements=" + pluginsEntitlements + '}'; } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index 0d1a7c14ece4..fb63d5ffbeb4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -39,6 +39,7 @@ public class PolicyParser { protected final XContentParser policyParser; protected final String policyName; + private final boolean isExternalPlugin; static String getEntitlementTypeName(Class<? extends Entitlement> entitlementClass) { var entitlementClassName = entitlementClass.getSimpleName(); @@ -56,9 +57,10 @@ .collect(Collectors.joining("_")); } - public PolicyParser(InputStream inputStream, String policyName) throws IOException { + public PolicyParser(InputStream inputStream, String policyName, boolean isExternalPlugin) throws IOException { this.policyParser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, Objects.requireNonNull(inputStream)); this.policyName = policyName; + this.isExternalPlugin = isExternalPlugin; } public Policy parsePolicy() { @@ -125,6 +127,10 @@ throw newPolicyParserException(scopeName, "unknown entitlement type [" + entitlementType + "]"); } + if (entitlementMetadata.esModulesOnly() && isExternalPlugin) { + throw newPolicyParserException("entitlement type [" + entitlementType + "] is allowed only on modules"); + } + Class<?>[] parameterTypes = entitlementConstructor.getParameterTypes(); String[] parametersNames = entitlementMetadata.parameterNames(); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java index 7eb2b1fb476b..dfcc5d8916f2 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserFailureTests.java @@ -19,7 +19,7 @@ public class PolicyParserFailureTests extends ESTestCase { public void testParserSyntaxFailures() { PolicyParserException ppe = expectThrows( PolicyParserException.class, - () -> new PolicyParser(new ByteArrayInputStream("[]".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml") + () -> new PolicyParser(new ByteArrayInputStream("[]".getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false) .parsePolicy() ); assertEquals("[1:1] policy parsing error for [test-failure-policy.yaml]: expected object ", ppe.getMessage()); @@ -29,7 +29,7 @@ PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - does_not_exist: {} - """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:5] policy parsing error for
[test-failure-policy.yaml] in scope [entitlement-module-name]: " + "unknown entitlement type [does_not_exist]", @@ -41,7 +41,7 @@ public class PolicyParserFailureTests extends ESTestCase { PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - file: {} - """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[2:12] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: missing entitlement parameter [path]", @@ -52,7 +52,7 @@ public class PolicyParserFailureTests extends ESTestCase { entitlement-module-name: - file: path: test-path - """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[4:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: missing entitlement parameter [actions]", @@ -68,11 +68,22 @@ public class PolicyParserFailureTests extends ESTestCase { actions: - read extra: test - """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml").parsePolicy()); + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", false).parsePolicy()); assertEquals( "[7:1] policy parsing error for [test-failure-policy.yaml] in scope [entitlement-module-name] " + "for entitlement type [file]: extraneous entitlement parameter(s) {extra=test}", ppe.getMessage() ); } + + public void testEntitlementIsNotForExternalPlugins() { + PolicyParserException ppe = expectThrows(PolicyParserException.class, () -> new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - create_class_loader + """.getBytes(StandardCharsets.UTF_8)), "test-failure-policy.yaml", true).parsePolicy()); + assertEquals( + "[2:5] policy parsing error for [test-failure-policy.yaml]: entitlement type [create_class_loader] is allowed only on modules", + ppe.getMessage() + ); + } } diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index a514cfe41889..633c76cb8c04 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -37,7 +37,17 @@ public class PolicyParserTests extends ESTestCase { } public void testPolicyBuilder() throws IOException { - Policy parsedPolicy = new PolicyParser(PolicyParserTests.class.getResourceAsStream("test-policy.yaml"), "test-policy.yaml") + Policy parsedPolicy = new PolicyParser(PolicyParserTests.class.getResourceAsStream("test-policy.yaml"), "test-policy.yaml", false) + .parsePolicy(); + Policy builtPolicy = new Policy( + "test-policy.yaml", + List.of(new Scope("entitlement-module-name", List.of(new FileEntitlement("test/path/to/file", List.of("read", "write"))))) + ); + assertEquals(parsedPolicy, builtPolicy); + } + + public void testPolicyBuilderOnExternalPlugin() throws IOException { + Policy parsedPolicy = new PolicyParser(PolicyParserTests.class.getResourceAsStream("test-policy.yaml"), "test-policy.yaml", true) .parsePolicy(); Policy builtPolicy = new Policy( 
"test-policy.yaml", @@ -50,7 +60,7 @@ public class PolicyParserTests extends ESTestCase { Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - create_class_loader - """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml").parsePolicy(); + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); Policy builtPolicy = new Policy( "test-policy.yaml", List.of(new Scope("entitlement-module-name", List.of(new CreateClassLoaderEntitlement()))) diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java index 8e0d838e602e..4bb30fdb0dd0 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; @@ -46,9 +45,7 @@ public final class RankEvalRequest extends ActionRequest implements IndicesReque rankingEvaluationSpec = new RankEvalSpec(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - searchType = SearchType.fromId(in.readByte()); - } + searchType = SearchType.fromId(in.readByte()); } RankEvalRequest() {} @@ -127,9 +124,7 @@ public final class RankEvalRequest extends ActionRequest implements IndicesReque rankingEvaluationSpec.writeTo(out); out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeByte(searchType.id()); - } + out.writeByte(searchType.id()); } @Override diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java index a79ae4de7cc6..4f0bf8300064 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3EcsCredentialsRestIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; @@ -37,9 +38,8 @@ public class RepositoryS3EcsCredentialsRestIT extends AbstractRepositoryS3RestTe private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - Ec2ImdsVersion.V1, - dynamicS3Credentials::addValidCredentials, - Set.of("/ecs_credentials_endpoint") + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) + .alternativeCredentialsEndpoints(Set.of("/ecs_credentials_endpoint")) ); private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); diff --git 
a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java index ead91981b3fa..bc41b9fd62ca 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV1CredentialsRestIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; @@ -23,8 +24,6 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import java.util.Set; - @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3RestTestCase { @@ -37,9 +36,7 @@ public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3Res private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - Ec2ImdsVersion.V1, - dynamicS3Credentials::addValidCredentials, - Set.of() + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) ); private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); @@ -47,7 +44,7 @@ public class RepositoryS3ImdsV1CredentialsRestIT extends AbstractRepositoryS3Res public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, ec2ImdsHttpFixture::getAddress) .build(); @ClassRule diff --git a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java index 67adb096bd1b..34500ff5227f 100644 --- a/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java +++ b/modules/repository-s3/src/javaRestTest/java/org/elasticsearch/repositories/s3/RepositoryS3ImdsV2CredentialsRestIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.repositories.s3; import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; import fixture.aws.imds.Ec2ImdsVersion; import fixture.s3.DynamicS3Credentials; import fixture.s3.S3HttpFixture; @@ -23,8 +24,6 @@ import org.junit.ClassRule; import org.junit.rules.RuleChain; import org.junit.rules.TestRule; -import java.util.Set; - @ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) @ThreadLeakScope(ThreadLeakScope.Scope.NONE) // https://github.com/elastic/elasticsearch/issues/102482 public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3RestTestCase { @@ -37,9 +36,7 @@ public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3Res private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials(); private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( - Ec2ImdsVersion.V2, - dynamicS3Credentials::addValidCredentials, - Set.of() + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials) ); private static final S3HttpFixture s3Fixture = new S3HttpFixture(true, BUCKET, BASE_PATH, dynamicS3Credentials::isAuthorized); @@ -47,7 +44,7 @@ public class RepositoryS3ImdsV2CredentialsRestIT extends AbstractRepositoryS3Res public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .module("repository-s3") .setting("s3.client." 
+ CLIENT + ".endpoint", s3Fixture::getAddress) - .systemProperty("com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", ec2ImdsHttpFixture::getAddress) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, ec2ImdsHttpFixture::getAddress) .build(); @ClassRule diff --git a/muted-tests.yml b/muted-tests.yml index 573bd035b872..95beeb7aa8f8 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -52,12 +52,6 @@ tests: - class: org.elasticsearch.backwards.MixedClusterClientYamlTestSuiteIT method: test {p0=mtermvectors/10_basic/Tests catching other exceptions per item} issue: https://github.com/elastic/elasticsearch/issues/113325 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testFieldMappings - issue: https://github.com/elastic/elasticsearch/issues/113592 -- class: org.elasticsearch.integration.KibanaUserRoleIntegTests - method: testSearchAndMSearch - issue: https://github.com/elastic/elasticsearch/issues/113593 - class: org.elasticsearch.xpack.transform.integration.TransformIT method: testStopWaitForCheckpoint issue: https://github.com/elastic/elasticsearch/issues/106113 @@ -180,6 +174,12 @@ tests: - class: "org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT" method: "test {scoring.*}" issue: https://github.com/elastic/elasticsearch/issues/117641 +- class: "org.elasticsearch.xpack.esql.qa.mixed.MultilusterEsqlSpecIT" + method: "test {scoring.*}" + issue: https://github.com/elastic/elasticsearch/issues/118460 +- class: "org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT" + method: "test {scoring.*}" + issue: https://github.com/elastic/elasticsearch/issues/118460 - class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT method: test {scoring.QstrWithFieldAndScoringSortedEval} issue: https://github.com/elastic/elasticsearch/issues/117751 @@ -276,9 +276,6 @@ tests: - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/118214 -- class: org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT - method: testTopNThenEnrichRemote - issue: https://github.com/elastic/elasticsearch/issues/118307 - class: org.elasticsearch.xpack.remotecluster.CrossClusterEsqlRCS1UnavailableRemotesIT method: testEsqlRcs1UnavailableRemoteScenarios issue: https://github.com/elastic/elasticsearch/issues/118350 @@ -317,6 +314,9 @@ tests: - class: org.elasticsearch.packaging.test.DockerTests method: test011SecurityEnabledStatus issue: https://github.com/elastic/elasticsearch/issues/118517 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testInvalidJSON + issue: https://github.com/elastic/elasticsearch/issues/116521 # Examples: # diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 233557722534..95715217fa59 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -7,6 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ apply plugin: 'elasticsearch.internal-java-rest-test' +apply plugin: 'elasticsearch.internal-cluster-test' esplugin { description 'The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.' 
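The repository-s3 changes above (and the discovery-ec2 tests below) move from the fixture's positional constructor to the fluent Ec2ImdsServiceBuilder. A minimal sketch of the new wiring, assuming only the builder and fixture methods visible in this diff (these are test fixtures, not production API):

    import fixture.aws.imds.Ec2ImdsHttpFixture;
    import fixture.aws.imds.Ec2ImdsServiceBuilder;
    import fixture.aws.imds.Ec2ImdsVersion;
    import fixture.s3.DynamicS3Credentials;

    import java.util.Set;

    public class ImdsFixtureWiringSketch {
        // Tracks the credentials the fixture hands out so that requests signed
        // with them can later be recognised as authorized, as in the tests above.
        private static final DynamicS3Credentials dynamicS3Credentials = new DynamicS3Credentials();

        // IMDSv1 endpoint that publishes fresh credentials and, optionally,
        // also serves them from an ECS-style alternative endpoint.
        private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture(
            new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicS3Credentials::addValidCredentials)
                .alternativeCredentialsEndpoints(Set.of("/ecs_credentials_endpoint"))
        );
    }

Clusters under test then point the AWS SDK at the fixture via Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME rather than the hard-coded com.amazonaws.sdk.ec2MetadataServiceEndpointOverride string, which is what the two RepositoryS3Imds*CredentialsRestIT hunks above change.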
@@ -26,6 +27,11 @@ dependencies { api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" api "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${versions.jackson}" api "joda-time:joda-time:2.10.10" + + javaRestTestImplementation project(':plugins:discovery-ec2') + javaRestTestImplementation project(':test:fixtures:ec2-imds-fixture') + + internalClusterTestImplementation project(':test:fixtures:ec2-imds-fixture') } tasks.named("dependencyLicenses").configure { @@ -79,7 +85,7 @@ tasks.register("writeTestJavaPolicy") { } } -tasks.named("test").configure { +tasks.withType(Test).configureEach { dependsOn "writeTestJavaPolicy" // this is needed for insecure plugins, remove if possible! systemProperty 'tests.artifact', project.name diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java new file mode 100644 index 000000000000..b6a4845977b0 --- /dev/null +++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2NetworkAddressesTestCase.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.discovery.ec2; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.util.Collection; + +@ESIntegTestCase.ClusterScope(numDataNodes = 0) +public abstract class DiscoveryEc2NetworkAddressesTestCase extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.appendToCopyNoNullElements(super.nodePlugins(), Ec2DiscoveryPlugin.class); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + void verifyPublishAddress(String publishAddressSetting, String expectedAddress) throws IOException { + final var node = internalCluster().startNode(Settings.builder().put("http.publish_host", publishAddressSetting)); + assertEquals( + expectedAddress, + internalCluster().getInstance(HttpServerTransport.class, node).boundAddress().publishAddress().getAddress() + ); + internalCluster().stopNode(node); + } +} diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java new file mode 100644 index 000000000000..491fa37a4c87 --- /dev/null +++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2RegularNetworkAddressesIT.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.transport.BindTransportException; + +import java.io.IOException; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.containsString; + +public class DiscoveryEc2RegularNetworkAddressesIT extends DiscoveryEc2NetworkAddressesTestCase { + public void testLocalIgnoresImds() { + Ec2ImdsHttpFixture.runWithFixture(new Ec2ImdsServiceBuilder(randomFrom(Ec2ImdsVersion.values())), imdsFixture -> { + try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride(imdsFixture.getAddress())) { + verifyPublishAddress("_local_", "127.0.0.1"); + } + }); + } + + public void testImdsNotAvailable() throws IOException { + try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride("http://127.0.0.1")) { + // if IMDS is not running, regular values like `_local_` should still work + verifyPublishAddress("_local_", "127.0.0.1"); + + // but EC2 addresses will cause the node to fail to start + final var assertionError = expectThrows( + AssertionError.class, + () -> internalCluster().startNode(Settings.builder().put("http.publish_host", "_ec2_")) + ); + final var executionException = asInstanceOf(ExecutionException.class, assertionError.getCause()); + final var bindTransportException = asInstanceOf(BindTransportException.class, executionException.getCause()); + assertEquals("Failed to resolve publish address", bindTransportException.getMessage()); + final var ioException = asInstanceOf(IOException.class, bindTransportException.getCause()); + assertThat(ioException.getMessage(), containsString("/latest/meta-data/local-ipv4")); + } + } +} diff --git a/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java new file mode 100644 index 000000000000..f541c4cdd979 --- /dev/null +++ b/plugins/discovery-ec2/src/internalClusterTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SpecialNetworkAddressesIT.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import java.util.Map; +import java.util.stream.Stream; + +public class DiscoveryEc2SpecialNetworkAddressesIT extends DiscoveryEc2NetworkAddressesTestCase { + + private final String imdsAddressName; + private final String elasticsearchAddressName; + private final Ec2ImdsVersion imdsVersion; + + public DiscoveryEc2SpecialNetworkAddressesIT( + @Name("imdsAddressName") String imdsAddressName, + @Name("elasticsearchAddressName") String elasticsearchAddressName, + @Name("imdsVersion") Ec2ImdsVersion imdsVersion + ) { + this.imdsAddressName = imdsAddressName; + this.elasticsearchAddressName = elasticsearchAddressName; + this.imdsVersion = imdsVersion; + } + + @ParametersFactory + public static Iterable<Object[]> parameters() { + return Map.of( + "_ec2:privateIpv4_", + "local-ipv4", + "_ec2:privateDns_", + "local-hostname", + "_ec2:publicIpv4_", + "public-ipv4", + "_ec2:publicDns_", + "public-hostname", + "_ec2:publicIp_", + "public-ipv4", + "_ec2:privateIp_", + "local-ipv4", + "_ec2_", + "local-ipv4" + ) + .entrySet() + .stream() + .flatMap( + addresses -> Stream.of(Ec2ImdsVersion.values()) + .map(ec2ImdsVersion -> new Object[] { addresses.getValue(), addresses.getKey(), ec2ImdsVersion }) + ) + .toList(); + } + + public void testSpecialNetworkAddresses() { + final var publishAddress = "10.0." + between(0, 255) + "." + between(0, 255); + Ec2ImdsHttpFixture.runWithFixture( + new Ec2ImdsServiceBuilder(imdsVersion).addInstanceAddress(imdsAddressName, publishAddress), + imdsFixture -> { + try (var ignored = Ec2ImdsHttpFixture.withEc2MetadataServiceEndpointOverride(imdsFixture.getAddress())) { + verifyPublishAddress(elasticsearchAddressName, publishAddress); + } + } + ); + } + +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV1IT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV1IT.java new file mode 100644 index 000000000000..32291236ea15 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV1IT.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +public class DiscoveryEc2AvailabilityZoneAttributeImdsV1IT extends DiscoveryEc2AvailabilityZoneAttributeTestCase { + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).availabilityZoneSupplier( + DiscoveryEc2AvailabilityZoneAttributeTestCase::getAvailabilityZone + ) + ); + + public static ElasticsearchCluster cluster = buildCluster(ec2ImdsHttpFixture::getAddress); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV2IT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV2IT.java new file mode 100644 index 000000000000..8b785d688e7c --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeImdsV2IT.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +public class DiscoveryEc2AvailabilityZoneAttributeImdsV2IT extends DiscoveryEc2AvailabilityZoneAttributeTestCase { + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).availabilityZoneSupplier( + DiscoveryEc2AvailabilityZoneAttributeTestCase::getAvailabilityZone + ) + ); + + public static ElasticsearchCluster cluster = buildCluster(ec2ImdsHttpFixture::getAddress); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(cluster); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java new file mode 100644 index 000000000000..602a98e17970 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeNoImdsIT.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.discovery.ec2; + +import org.elasticsearch.client.Request; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +import java.io.IOException; + +public class DiscoveryEc2AvailabilityZoneAttributeNoImdsIT extends ESRestTestCase { + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .plugin("discovery-ec2") + .setting(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), "true") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testAvailabilityZoneAttribute() throws IOException { + final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none"))); + for (final var nodeId : nodesInfoResponse.evaluateMapKeys("nodes")) { + assertNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone")); + } + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java new file mode 100644 index 000000000000..178c5c3ad4ca --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2AvailabilityZoneAttributeTestCase.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.imds.Ec2ImdsHttpFixture; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +public abstract class DiscoveryEc2AvailabilityZoneAttributeTestCase extends ESRestTestCase { + + private static final Set<String> createdAvailabilityZones = ConcurrentCollections.newConcurrentSet(); + + protected static String getAvailabilityZone() { + final var zoneName = randomIdentifier(); + createdAvailabilityZones.add(zoneName); + return zoneName; + } + + protected static ElasticsearchCluster buildCluster(Supplier<String> imdsFixtureAddressSupplier) { + return ElasticsearchCluster.local() + .plugin("discovery-ec2") + .setting(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), "true") + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, imdsFixtureAddressSupplier) + .build(); + } + + public void testAvailabilityZoneAttribute() throws IOException { + final var nodesInfoResponse = assertOKAndCreateObjectPath(client().performRequest(new Request("GET", "/_nodes/_all/_none"))); + for (final var nodeId : nodesInfoResponse.evaluateMapKeys("nodes")) { + assertThat( + createdAvailabilityZones, + Matchers.hasItem( + Objects.requireNonNull(nodesInfoResponse.evaluateExact("nodes", nodeId, "attributes", "aws_availability_zone")) + ) + ); + } + } +} diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java deleted file mode 100644 index 82787f53c9f7..000000000000 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2NetworkTests.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1".
- */ - -package org.elasticsearch.discovery.ec2; - -import com.sun.net.httpserver.HttpServer; - -import org.elasticsearch.common.Strings; - -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.mocksocket.MockHttpServer; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; - -import java.io.IOException; -import java.io.OutputStream; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Arrays; -import java.util.Collections; -import java.util.function.BiConsumer; - -import static com.amazonaws.SDKGlobalConfiguration.EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY; -import static java.nio.charset.StandardCharsets.UTF_8; -import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.equalTo; - -/** - * Test for EC2 network.host settings. - * <p> - * Warning: This test doesn't assert that the exceptions are thrown. - * They aren't. - */ -@SuppressForbidden(reason = "use http server") -public class Ec2NetworkTests extends ESTestCase { - - private static HttpServer httpServer; - - @BeforeClass - public static void startHttp() throws Exception { - httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); - - BiConsumer<String, String> registerContext = (path, v) -> { - final byte[] message = v.getBytes(UTF_8); - httpServer.createContext(path, (s) -> { - s.sendResponseHeaders(RestStatus.OK.getStatus(), message.length); - OutputStream responseBody = s.getResponseBody(); - responseBody.write(message); - responseBody.close(); - }); - }; - registerContext.accept("/latest/meta-data/local-ipv4", "127.0.0.1"); - registerContext.accept("/latest/meta-data/public-ipv4", "165.168.10.2"); - registerContext.accept("/latest/meta-data/public-hostname", "165.168.10.3"); - registerContext.accept("/latest/meta-data/local-hostname", "10.10.10.5"); - - httpServer.start(); - } - - @Before - public void setup() { - // redirect EC2 metadata service to httpServer - AccessController.doPrivileged( - (PrivilegedAction<String>) () -> System.setProperty( - EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY, - "http://" + httpServer.getAddress().getHostName() + ":" + httpServer.getAddress().getPort() - ) - ); - } - - @AfterClass - public static void stopHttp() { - httpServer.stop(0); - httpServer = null; - } - - /** - * Test for network.host: _ec2_ - */ - public void testNetworkHostEc2() throws IOException { - resolveEc2("_ec2_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2_ - */ - public void testNetworkHostUnableToResolveEc2() { - // redirect EC2 metadata service to unknown location - AccessController.doPrivileged( - (PrivilegedAction<String>) () -> System.setProperty(EC2_METADATA_SERVICE_OVERRIDE_SYSTEM_PROPERTY, "http://127.0.0.1/") - ); - - try { - resolveEc2("_ec2_", (InetAddress[]) null); - } catch (IOException e) { - assertThat( - e.getMessage(), - equalTo("IOException caught when fetching InetAddress from [http://127.0.0.1//latest/meta-data/local-ipv4]") - ); - } - } - - /** - * Test for network.host: _ec2:publicIp_ - */ - public void testNetworkHostEc2PublicIp() throws IOException { - resolveEc2("_ec2:publicIp_", InetAddress.getByName("165.168.10.2")); - } - - /** - * Test for network.host: _ec2:privateIp_ - */ - public void testNetworkHostEc2PrivateIp() throws IOException { - resolveEc2("_ec2:privateIp_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2:privateIpv4_ - */ - public void testNetworkHostEc2PrivateIpv4() throws IOException { - resolveEc2("_ec2:privateIpv4_", InetAddress.getByName("127.0.0.1")); - } - - /** - * Test for network.host: _ec2:privateDns_ - */ - public void testNetworkHostEc2PrivateDns() throws IOException { - resolveEc2("_ec2:privateDns_", InetAddress.getByName("10.10.10.5")); - } - - /** - * Test for network.host: _ec2:publicIpv4_ - */ - public void testNetworkHostEc2PublicIpv4() throws IOException { - resolveEc2("_ec2:publicIpv4_", InetAddress.getByName("165.168.10.2")); - } - - /** - * Test for network.host: _ec2:publicDns_ - */ - public void testNetworkHostEc2PublicDns() throws IOException { - resolveEc2("_ec2:publicDns_", InetAddress.getByName("165.168.10.3")); - } - - private InetAddress[] resolveEc2(String host, InetAddress...
expected) throws IOException { - Settings nodeSettings = Settings.builder().put("network.host", host).build(); - - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); - - InetAddress[] addresses = networkService.resolveBindHostAddresses( - NetworkService.GLOBAL_NETWORK_BIND_HOST_SETTING.get(nodeSettings).toArray(Strings.EMPTY_ARRAY) - ); - if (expected == null) { - fail("We should get an IOException, resolved addressed:" + Arrays.toString(addresses)); - } - assertThat(addresses, arrayContaining(expected)); - return addresses; - } - - /** - * Test that we don't have any regression with network host core settings such as - * network.host: _local_ - */ - public void testNetworkHostCoreLocal() throws IOException { - NetworkService networkService = new NetworkService(Collections.singletonList(new Ec2NameResolver())); - InetAddress[] addresses = networkService.resolveBindHostAddresses(null); - assertThat(addresses, arrayContaining(networkService.resolveBindHostAddresses(new String[] { "_local_" }))); - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json new file mode 100644 index 000000000000..a034f204edbf --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/migrate.cancel_reindex.json @@ -0,0 +1,30 @@ +{ + "migrate.cancel_reindex":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-stream-reindex.html", + "description":"This API cancels a migration reindex attempt for a data stream or index" + }, + "stability":"experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_migration/reindex/{index}/_cancel", + "methods":[ + "POST" + ], + "parts":{ + "index":{ + "type":"string", + "description":"The index or data stream name" + } + } + } + ] + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 335e0b4783bf..8e8afafc9f06 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -84,32 +84,6 @@ - match: { error.type: "illegal_argument_exception" } - match: { error.reason: "Types cannot be provided in put mapping requests" } ---- -"Put mappings with explicit _doc type bwc": - - skip: - cluster_features: [ "gte_v8.0.0"] - reason: "old deprecation message for pre 8.0" - - requires: - test_runner_features: ["node_selector"] - - do: - indices.create: - index: test_index - - - do: - node_selector: - version: "original" - catch: bad_request - indices.put_mapping: - index: test_index - body: - _doc: - properties: - field: - type: keyword - - - match: { error.type: "illegal_argument_exception" } - - match: { error.reason: "Types cannot be provided in put mapping requests, unless the include_type_name parameter is set to true."
} - --- "Update per-field metadata": diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 3c397d02a403..d7ba415687af 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -480,5 +480,5 @@ module org.elasticsearch.server { exports org.elasticsearch.lucene.spatial; exports org.elasticsearch.inference.configuration; exports org.elasticsearch.monitor.metrics; - + exports org.elasticsearch.plugins.internal.rewriter to org.elasticsearch.inference; } diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 11736bfe07de..a430611559bb 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -40,7 +40,6 @@ import org.elasticsearch.persistent.NotPersistentTaskNodeException; import org.elasticsearch.persistent.PersistentTaskNodeNotAssignedException; import org.elasticsearch.rest.ApiNotAvailableException; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchException; import org.elasticsearch.search.TooManyScrollContextsException; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; @@ -319,10 +318,6 @@ public class ElasticsearchException extends RuntimeException implements ToXConte public static ElasticsearchException readException(StreamInput input, int id) throws IOException { CheckedFunction<StreamInput, ? extends ElasticsearchException, IOException> elasticsearchException = ID_TO_SUPPLIER.get(id); if (elasticsearchException == null) { - if (id == 127 && input.getTransportVersion().before(TransportVersions.V_7_5_0)) { - // was SearchContextException - return new SearchException(input); - } throw new IllegalStateException("unknown exception for id: " + id); } return elasticsearchException.apply(input); @@ -1817,13 +1812,13 @@ public class ElasticsearchException extends RuntimeException implements ToXConte org.elasticsearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException.class, org.elasticsearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException::new, 156, - TransportVersions.V_7_5_0 + UNKNOWN_VERSION_ADDED ), INGEST_PROCESSOR_EXCEPTION( org.elasticsearch.ingest.IngestProcessorException.class, org.elasticsearch.ingest.IngestProcessorException::new, 157, - TransportVersions.V_7_5_0 + UNKNOWN_VERSION_ADDED ), PEER_RECOVERY_NOT_FOUND_EXCEPTION( org.elasticsearch.indices.recovery.PeerRecoveryNotFound.class, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 40856d782598..95fcbb072bbf 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -55,7 +55,6 @@ public class TransportVersions { public static final TransportVersion V_7_0_0 = def(7_00_00_99); public static final TransportVersion V_7_3_0 = def(7_03_00_99); public static final TransportVersion V_7_4_0 = def(7_04_00_99); - public static final TransportVersion V_7_5_0 = def(7_05_00_99); public static final TransportVersion V_7_6_0 = def(7_06_00_99); public static final TransportVersion V_7_7_0 = def(7_07_00_99); public static final TransportVersion V_7_8_0 = def(7_08_00_99); @@ -139,6 +138,7 @@ public class TransportVersions { public static final TransportVersion RETRIES_AND_OPERATIONS_IN_BLOBSTORE_STATS =
def(8_804_00_0); public static final TransportVersion ADD_DATA_STREAM_OPTIONS_TO_TEMPLATES = def(8_805_00_0); public static final TransportVersion KNN_QUERY_RESCORE_OVERSAMPLE = def(8_806_00_0); + public static final TransportVersion SEMANTIC_QUERY_LENIENT = def(8_807_00_0); /* * WARNING: DO NOT MERGE INTO MAIN! diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 24aa5bd261d7..f03505de310d 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -187,7 +187,7 @@ public class Version implements VersionId<Version>, ToXContentFragment { public static final Version V_8_15_2 = new Version(8_15_02_99); public static final Version V_8_15_3 = new Version(8_15_03_99); public static final Version V_8_15_4 = new Version(8_15_04_99); - public static final Version V_8_15_6 = new Version(8_15_06_99); + public static final Version V_8_15_5 = new Version(8_15_05_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_16_2 = new Version(8_16_02_99); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index ae59f6578f03..9be23c91db07 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; @@ -59,6 +58,7 @@ import java.util.List; import java.util.Objects; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.stream.Stream; import static org.elasticsearch.bootstrap.BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING; import static org.elasticsearch.nativeaccess.WindowsFunctions.ConsoleCtrlHandler.CTRL_CLOSE_EVENT; @@ -218,10 +218,14 @@ class Elasticsearch { if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { LogManager.getLogger(Elasticsearch.class).info("Bootstrapping Entitlements"); - List<Tuple<Path, Boolean>> pluginData = pluginsLoader.allBundles() - .stream() - .map(bundle -> Tuple.tuple(bundle.getDir(), bundle.pluginDescriptor().isModular())) - .toList(); + List<EntitlementBootstrap.PluginData> pluginData = Stream.concat( + pluginsLoader.moduleBundles() + .stream() + .map(bundle -> new EntitlementBootstrap.PluginData(bundle.getDir(), bundle.pluginDescriptor().isModular(), false)), + pluginsLoader.pluginBundles() + .stream() + .map(bundle -> new EntitlementBootstrap.PluginData(bundle.getDir(), bundle.pluginDescriptor().isModular(), true)) ).toList(); EntitlementBootstrap.bootstrap(pluginData, pluginsResolver::resolveClassToPluginName); } else if (RuntimeVersionFeature.isSecurityManagerAvailable()) {
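The bootstrap change above now tags each bundle with a third PluginData flag distinguishing internal modules (false) from external plugins (true); that flag is what reaches the new boolean parameter on PolicyParser exercised by the tests at the top of this section. A hedged sketch of the effect, reusing only the constructor signature and error message shown in those tests:

    import org.elasticsearch.entitlement.runtime.policy.PolicyParser;
    import org.elasticsearch.entitlement.runtime.policy.PolicyParserException;

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;

    public class PolicyFlagSketch {
        public static void main(String[] args) throws Exception {
            byte[] yaml = """
                entitlement-module-name:
                  - create_class_loader
                """.getBytes(StandardCharsets.UTF_8);
            // Internal module (externalPlugin = false): module-only entitlements parse fine.
            new PolicyParser(new ByteArrayInputStream(yaml), "policy.yaml", false).parsePolicy();
            try {
                // External plugin (externalPlugin = true): the same policy is rejected.
                new PolicyParser(new ByteArrayInputStream(yaml), "policy.yaml", true).parsePolicy();
            } catch (PolicyParserException e) {
                // "... entitlement type [create_class_loader] is allowed only on modules"
            }
        }
    }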
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 952789e1bf74..c74275991c89 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -1116,10 +1116,10 @@ public class IndexMetadata implements Diffable<IndexMetadata>, ToXContentFragmen /** * Return the {@link IndexVersion} that this index provides compatibility for. - * This is typically compared to the {@link IndexVersions#MINIMUM_COMPATIBLE} to figure out whether the index can be handled - * by the cluster. - * By default, this is equal to the {@link #getCreationVersion()}, but can also be a newer version if the index has been imported as - * a legacy index from an older snapshot, and its metadata has been converted to be handled by newer version nodes. + * This is typically compared to the {@link IndexVersions#MINIMUM_COMPATIBLE} or {@link IndexVersions#MINIMUM_READONLY_COMPATIBLE} + * to figure out whether the index can be handled by the cluster. + * By default, this is equal to the {@link #getCreationVersion()}, but can also be a newer version if the index has been created by + * a legacy version and imported as an archive, in which case its metadata has been converted to be handled by newer-version nodes. */ public IndexVersion getCompatibilityVersion() { return indexCompatibilityVersion; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 345db5291183..e6f8e834365d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -67,6 +67,7 @@ import java.util.function.Predicate; * Note: This class is performance sensitive, so we pay extra attention on the data structure usage and we avoid streams and iterators * when possible in favor of the classic for-i loops.
*/ +@SuppressWarnings("ForLoopReplaceableByForEach") public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 12c698a6ed95..5e6dec7b6806 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -65,7 +65,6 @@ public class DiscoveryNodes implements Iterable<DiscoveryNode>, SimpleDiffable<DiscoveryNodes> Map<String, DiscoveryNode> ingestNodes, @Nullable String masterNodeId, @Nullable String localNodeId, - Version minNonClientNodeVersion, Version maxNodeVersion, Version minNodeVersion, IndexVersion maxDataNodeCompatibleIndexVersion, @@ -98,7 +96,6 @@ public class DiscoveryNodes implements Iterable<DiscoveryNode>, SimpleDiffable<DiscoveryNodes> for (Map.Entry<String, DiscoveryNode> nodeEntry : nodes.entrySet()) { DiscoveryNode discoNode = nodeEntry.getValue(); Version version = discoNode.getVersion(); if (discoNode.canContainData() || discoNode.isMasterNode()) { - minNonClientNodeVersion = min(minNonClientNodeVersion, version); maxDataNodeCompatibleIndexVersion = min(maxDataNodeCompatibleIndexVersion, discoNode.getMaxIndexVersion()); } minNodeVersion = min(minNodeVersion, version); @@ -894,7 +877,6 @@ public class DiscoveryNodes implements Iterable<DiscoveryNode>, SimpleDiffable<DiscoveryNodes> diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java - Map<String, IndexStorePlugin.SnapshotCommitSupplier> snapshotCommitSuppliers + Map<String, IndexStorePlugin.SnapshotCommitSupplier> snapshotCommitSuppliers, + QueryRewriteInterceptor queryRewriteInterceptor ) throws IOException { final IndexEventListener eventListener = freeze(); Function<IndexService, CheckedFunction<DirectoryReader, DirectoryReader, IOException>> readerWrapperFactory = indexReaderWrapper @@ -540,7 +542,8 @@ public final class IndexModule { indexFoldersDeletionListener, snapshotCommitSupplier, indexCommitListener.get(), - mapperMetrics + mapperMetrics, + queryRewriteInterceptor ); success = true; return indexService; diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 571bbd76a49d..a5b3991d89bc 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -85,6 +85,7 @@ import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.IndexStorePlugin; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.threadpool.ThreadPool; @@ -162,6 +163,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final Supplier<Sort> indexSortSupplier; private final ValuesSourceRegistry valuesSourceRegistry; private final MapperMetrics mapperMetrics; + private final QueryRewriteInterceptor queryRewriteInterceptor; @SuppressWarnings("this-escape") public IndexService( @@ -196,7 +198,8 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust IndexStorePlugin.IndexFoldersDeletionListener indexFoldersDeletionListener, IndexStorePlugin.SnapshotCommitSupplier snapshotCommitSupplier, Engine.IndexCommitListener indexCommitListener, - MapperMetrics mapperMetrics + MapperMetrics mapperMetrics, + QueryRewriteInterceptor queryRewriteInterceptor
) { super(indexSettings); assert indexCreationContext != IndexCreationContext.RELOAD_ANALYZERS @@ -271,6 +274,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); this.indexCommitListener = indexCommitListener; this.mapperMetrics = mapperMetrics; + this.queryRewriteInterceptor = queryRewriteInterceptor; try (var ignored = threadPool.getThreadContext().clearTraceContext()) { // kick off async ops for the first shard in this index this.refreshTask = new AsyncRefreshTask(this); @@ -802,6 +806,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust allowExpensiveQueries, scriptService, null, + null, null ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index cfb51cc3b5ae..22deee132562 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -123,8 +123,13 @@ public record IndexVersion(int id, Version luceneVersion) implements VersionId<IndexVersion> static final NavigableMap<Integer, IndexVersion> VERSION_IDS = getAllVersionIds(IndexVersions.class); static final IndexVersion LATEST_DEFINED; @@ -207,7 +208,7 @@ Map<Integer, String> versionIdFields = new HashMap<>(); NavigableMap<Integer, IndexVersion> builder = new TreeMap<>(); - Set<String> ignore = Set.of("ZERO", "MINIMUM_COMPATIBLE"); + Set<String> ignore = Set.of("ZERO", "MINIMUM_COMPATIBLE", "MINIMUM_READONLY_COMPATIBLE"); for (Field declaredField : cls.getFields()) { if (declaredField.getType().equals(IndexVersion.class)) { diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index f00e6904feac..05262798bac2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.SuggestingErrorOnUnknown; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.xcontent.AbstractObjectParser; import org.elasticsearch.xcontent.FilterXContentParser; import org.elasticsearch.xcontent.FilterXContentParserWrapper; @@ -278,6 +279,14 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>> @Override public final QueryBuilder rewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryRewriteInterceptor queryRewriteInterceptor = queryRewriteContext.getQueryRewriteInterceptor(); + if (queryRewriteInterceptor != null) { + var rewritten = queryRewriteInterceptor.interceptAndRewrite(queryRewriteContext, this); + if (rewritten != this) { + return new InterceptedQueryBuilderWrapper(rewritten); + } + } + QueryBuilder rewritten = doRewrite(queryRewriteContext); if (rewritten == this) { return rewritten; diff --git a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java index e054f17ef64d..a84455ef09bf 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/CoordinatorRewriteContext.java @@ -104,6 +104,7 @@ public class CoordinatorRewriteContext extends QueryRewriteContext { null, null, null, + null, null ); this.dateFieldRangeInfo = dateFieldRangeInfo;
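The AbstractQueryBuilder hunk above is the single hook for the new mechanism: a non-null interceptor gets one look at each query during rewrite, and a changed result is wrapped in InterceptedQueryBuilderWrapper so later rewrite rounds do not re-intercept it. A toy interceptor, sketched against only the interface surface visible in this diff (getQueryName and interceptAndRewrite; the term-to-match_all rewrite and the "_reserved" field are purely illustrative):

    import org.elasticsearch.index.query.MatchAllQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilder;
    import org.elasticsearch.index.query.QueryRewriteContext;
    import org.elasticsearch.index.query.TermQueryBuilder;
    import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor;

    public class ReservedFieldInterceptor implements QueryRewriteInterceptor {

        @Override
        public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) {
            // Divert term queries on a hypothetical reserved field; everything else passes through.
            if (queryBuilder instanceof TermQueryBuilder term && "_reserved".equals(term.fieldName())) {
                return new MatchAllQueryBuilder();
            }
            return queryBuilder; // returning the same instance means no wrapping happens
        }

        @Override
        public String getQueryName() {
            return TermQueryBuilder.NAME; // the query type this interceptor claims
        }
    }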
diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java index aacb4b4129c7..31bc7dddacb7 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitContextBuilder.java @@ -66,6 +66,9 @@ public abstract class InnerHitContextBuilder { public static void extractInnerHits(QueryBuilder query, Map<String, InnerHitContextBuilder> innerHitBuilders) { if (query instanceof AbstractQueryBuilder) { ((AbstractQueryBuilder<?>) query).extractInnerHitBuilders(innerHitBuilders); + } else if (query instanceof InterceptedQueryBuilderWrapper interceptedQuery) { + // Unwrap an intercepted query here + extractInnerHits(interceptedQuery.queryBuilder, innerHitBuilders); + } else { throw new IllegalStateException( "provided query builder [" + query.getClass() + "] class should inherit from AbstractQueryBuilder, but it doesn't" diff --git a/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java b/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java new file mode 100644 index 000000000000..b1030e4a76d9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapper.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.query; + +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Wrapper for instances of {@link QueryBuilder} that have been intercepted using the {@link QueryRewriteInterceptor} to + * break out of the rewrite phase. These instances are unwrapped on serialization. + */ +class InterceptedQueryBuilderWrapper implements QueryBuilder { + + protected final QueryBuilder queryBuilder; + + InterceptedQueryBuilderWrapper(QueryBuilder queryBuilder) { + super(); + this.queryBuilder = queryBuilder; + } + + @Override + public QueryBuilder rewrite(QueryRewriteContext queryRewriteContext) throws IOException { + QueryRewriteInterceptor queryRewriteInterceptor = queryRewriteContext.getQueryRewriteInterceptor(); + try { + queryRewriteContext.setQueryRewriteInterceptor(null); + QueryBuilder rewritten = queryBuilder.rewrite(queryRewriteContext); + return rewritten != queryBuilder ?
new InterceptedQueryBuilderWrapper(rewritten) : this; } finally { queryRewriteContext.setQueryRewriteInterceptor(queryRewriteInterceptor); } } + + @Override + public String getWriteableName() { + return queryBuilder.getWriteableName(); + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return queryBuilder.getMinimalSupportedVersion(); + } + + @Override + public Query toQuery(SearchExecutionContext context) throws IOException { + return queryBuilder.toQuery(context); + } + + @Override + public QueryBuilder queryName(String queryName) { + queryBuilder.queryName(queryName); + return this; + } + + @Override + public String queryName() { + return queryBuilder.queryName(); + } + + @Override + public float boost() { + return queryBuilder.boost(); + } + + @Override + public QueryBuilder boost(float boost) { + queryBuilder.boost(boost); + return this; + } + + @Override + public String getName() { + return queryBuilder.getName(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + queryBuilder.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return queryBuilder.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o instanceof InterceptedQueryBuilderWrapper == false) return false; + return Objects.equals(queryBuilder, ((InterceptedQueryBuilderWrapper) o).queryBuilder); + } + + @Override + public int hashCode() { + return Objects.hashCode(queryBuilder); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index fce74aa60ab1..265a0c52593b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.builder.PointInTimeBuilder; @@ -70,6 +71,7 @@ public class QueryRewriteContext { protected Predicate<String> allowedFields; private final ResolvedIndices resolvedIndices; private final PointInTimeBuilder pit; + private QueryRewriteInterceptor queryRewriteInterceptor; public QueryRewriteContext( final XContentParserConfiguration parserConfiguration, @@ -86,7 +88,8 @@ public class QueryRewriteContext { final BooleanSupplier allowExpensiveQueries, final ScriptCompiler scriptService, final ResolvedIndices resolvedIndices, - final PointInTimeBuilder pit + final PointInTimeBuilder pit, + final QueryRewriteInterceptor queryRewriteInterceptor ) { this.parserConfiguration = parserConfiguration; @@ -105,6 +108,7 @@ public class QueryRewriteContext { this.scriptService = scriptService; this.resolvedIndices = resolvedIndices; this.pit = pit; + this.queryRewriteInterceptor = queryRewriteInterceptor; } public QueryRewriteContext(final XContentParserConfiguration parserConfiguration, final Client client, final LongSupplier nowInMillis) { @@ -123,6 +127,7 @@ public class QueryRewriteContext { null, null, null, + null, null ); } @@ -132,7
+137,8 @@ public class QueryRewriteContext { final Client client, final LongSupplier nowInMillis, final ResolvedIndices resolvedIndices, - final PointInTimeBuilder pit + final PointInTimeBuilder pit, + final QueryRewriteInterceptor queryRewriteInterceptor ) { this( parserConfiguration, @@ -149,7 +155,8 @@ public class QueryRewriteContext { null, null, resolvedIndices, - pit + pit, + queryRewriteInterceptor ); } @@ -428,4 +435,13 @@ public class QueryRewriteContext { // It was decided we should only test the first of these potentially multiple preferences. return value.split(",")[0].trim(); } + + public QueryRewriteInterceptor getQueryRewriteInterceptor() { + return queryRewriteInterceptor; + } + + public void setQueryRewriteInterceptor(QueryRewriteInterceptor queryRewriteInterceptor) { + this.queryRewriteInterceptor = queryRewriteInterceptor; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index fbc3696d4022..b2ee6134a772 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -271,6 +271,7 @@ public class SearchExecutionContext extends QueryRewriteContext { allowExpensiveQueries, scriptService, null, + null, null ); this.shardId = shardId; diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 8e32c47dd1f1..a4e2790af596 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -137,6 +137,7 @@ import org.elasticsearch.node.Node; import org.elasticsearch.plugins.FieldPredicate; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; @@ -265,6 +266,7 @@ public class IndicesService extends AbstractLifecycleComponent private final MapperMetrics mapperMetrics; private final PostRecoveryMerger postRecoveryMerger; private final List searchOperationListeners; + private final QueryRewriteInterceptor queryRewriteInterceptor; @Override protected void doStart() { @@ -334,6 +336,7 @@ public class IndicesService extends AbstractLifecycleComponent this.indexFoldersDeletionListeners = new CompositeIndexFoldersDeletionListener(builder.indexFoldersDeletionListeners); this.snapshotCommitSuppliers = builder.snapshotCommitSuppliers; this.requestCacheKeyDifferentiator = builder.requestCacheKeyDifferentiator; + this.queryRewriteInterceptor = builder.queryRewriteInterceptor; this.mapperMetrics = builder.mapperMetrics; // doClose() is called when shutting down a node, yet there might still be ongoing requests // that we need to wait for before closing some resources such as the caches. 
In order to @@ -783,7 +786,8 @@ public class IndicesService extends AbstractLifecycleComponent idFieldMappers.apply(idxSettings.getMode()), valuesSourceRegistry, indexFoldersDeletionListeners, - snapshotCommitSuppliers + snapshotCommitSuppliers, + queryRewriteInterceptor ); } @@ -1771,7 +1775,7 @@ public class IndicesService extends AbstractLifecycleComponent * Returns a new {@link QueryRewriteContext} with the given {@code now} provider */ public QueryRewriteContext getRewriteContext(LongSupplier nowInMillis, ResolvedIndices resolvedIndices, PointInTimeBuilder pit) { - return new QueryRewriteContext(parserConfig, client, nowInMillis, resolvedIndices, pit); + return new QueryRewriteContext(parserConfig, client, nowInMillis, resolvedIndices, pit, queryRewriteInterceptor); } public DataRewriteContext getDataRewriteContext(LongSupplier nowInMillis) { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java index 09d04421bc19..813776c293eb 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java @@ -33,6 +33,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.EnginePlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.internal.ShardSearchRequest; @@ -78,6 +80,7 @@ public class IndicesServiceBuilder { CheckedBiConsumer requestCacheKeyDifferentiator; MapperMetrics mapperMetrics; List searchOperationListener = List.of(); + QueryRewriteInterceptor queryRewriteInterceptor = null; public IndicesServiceBuilder settings(Settings settings) { this.settings = settings; @@ -247,6 +250,27 @@ public class IndicesServiceBuilder { .flatMap(m -> m.entrySet().stream()) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + var queryRewriteInterceptors = pluginsService.filterPlugins(SearchPlugin.class) + .map(SearchPlugin::getQueryRewriteInterceptors) + .flatMap(List::stream) + .collect(Collectors.toMap(QueryRewriteInterceptor::getQueryName, interceptor -> { + if (interceptor.getQueryName() == null) { + throw new IllegalArgumentException("QueryRewriteInterceptor [" + interceptor.getClass().getName() + "] requires name"); + } + return interceptor; + }, (a, b) -> { + throw new IllegalStateException( + "Conflicting rewrite interceptors [" + + a.getQueryName() + + "] found in [" + + a.getClass().getName() + + "] and [" + + b.getClass().getName() + + "]" + ); + })); + queryRewriteInterceptor = QueryRewriteInterceptor.multi(queryRewriteInterceptors); + return new IndicesService(this); } } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexMappingUpdateService.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexMappingUpdateService.java index 1036ae0d55b4..65e67db39dc2 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexMappingUpdateService.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexMappingUpdateService.java @@ -99,7 +99,7 @@ public class SystemIndexMappingUpdateService implements ClusterStateListener { } // if we're in a mixed-version 
cluster, exit - if (state.nodes().getMaxNodeVersion().after(state.nodes().getSmallestNonClientNodeVersion())) { + if (state.nodes().isMixedVersionCluster()) { logger.debug("Skipping system indices up-to-date check as cluster has mixed versions"); return; } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java index c34bebd10c2e..d59edfce8918 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Binder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Binder.java @@ -65,9 +65,7 @@ import org.elasticsearch.injection.guice.spi.Message; * *

The {@link Provider} you use here does not have to be a "factory"; that * is, a provider which always creates each instance it provides. - * However, this is generally a good practice to follow. You can then use - * Guice's concept of {@link Scope scopes} to guide when creation should happen - * -- "letting Guice work for you". + * However, this is generally a good practice to follow. * *

  *     bind(Service.class).annotatedWith(Red.class).to(ServiceImpl.class);
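For orientation on the injection changes in the hunks that follow: with the Scope/Scopes abstraction removed, a Guice binding in this codebase is either unscoped or an eager singleton, and a module states that directly on the binder. A minimal sketch under that assumption, reusing the hypothetical Service/ServiceImpl names from the javadoc above (this is illustrative, not code from this change):

import org.elasticsearch.injection.guice.AbstractModule;

// Illustrative module: the only scoping choices left are Scoping.UNSCOPED
// (a fresh instance per injection) and Scoping.EAGER_SINGLETON (one
// instance per Injector, created when the injector is built).
public class ExampleModule extends AbstractModule {
    @Override
    protected void configure() {
        bind(Service.class).to(ServiceImpl.class); // unscoped by default
        bind(ServiceImpl.class).asEagerSingleton(); // eager singleton
    }
}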
diff --git a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java index 9223261ec2dd..677f111c764a 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/BindingProcessor.java @@ -218,7 +218,6 @@ class BindingProcessor extends AbstractProcessor { MembersInjector.class, Module.class, Provider.class, - Scope.class, TypeLiteral.class ); // TODO(jessewilson): fix BuiltInModule, then add Stage diff --git a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java index 99d42faf6a80..fe9ac309e23f 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/InjectorBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.injection.guice.internal.BindingImpl; import org.elasticsearch.injection.guice.internal.Errors; import org.elasticsearch.injection.guice.internal.ErrorsException; import org.elasticsearch.injection.guice.internal.InternalContext; +import org.elasticsearch.injection.guice.internal.Scoping; import org.elasticsearch.injection.guice.internal.Stopwatch; import org.elasticsearch.injection.guice.spi.Dependency; @@ -154,7 +155,7 @@ class InjectorBuilder { } private static void loadEagerSingletons(InjectorImpl injector, final Errors errors, BindingImpl binding) { - if (binding.getScoping().isEagerSingleton()) { + if (binding.getScoping() == Scoping.EAGER_SINGLETON) { try { injector.callInContext(new ContextualCallable() { final Dependency dependency = Dependency.get(binding.getKey()); diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java index 692617239ea7..6de9d8ff9dc8 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Provider.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Provider.java @@ -28,8 +28,6 @@ package org.elasticsearch.injection.guice; * instances, instances you wish to safely mutate and discard, instances which are out of scope * (e.g. using a {@code @RequestScoped} object from within a {@code @SessionScoped} object), or * instances that will be initialized lazily. - *
  • A custom {@link Scope} is implemented as a decorator of {@code Provider}, which decides - * when to delegate to the backing provider and when to provide the instance some other way. *
  • The {@link Injector} offers access to the {@code Provider} it uses to fulfill requests * for a given key, via the {@link Injector#getProvider} methods. * diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java b/server/src/main/java/org/elasticsearch/injection/guice/Scope.java deleted file mode 100644 index 681fc17bc635..000000000000 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scope.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright (C) 2006 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.injection.guice; - -/** - * A scope is a level of visibility that instances provided by Guice may have. - * By default, an instance created by the {@link Injector} has no scope, - * meaning it has no state from the framework's perspective -- the - * {@code Injector} creates it, injects it once into the class that required it, - * and then immediately forgets it. Associating a scope with a particular - * binding allows the created instance to be "remembered" and possibly used - * again for other injections. - *

    - * An example of a scope is {@link Scopes#SINGLETON}. - * - * @author crazybob@google.com (Bob Lee) - */ -public interface Scope { - - /** - * Scopes a provider. The returned provider returns objects from this scope. - * If an object does not exist in this scope, the provider can use the given - * unscoped provider to retrieve one. - *

    - * Scope implementations are strongly encouraged to override - * {@link Object#toString} in the returned provider and include the backing - * provider's {@code toString()} output. - * - * @param unscoped locates an instance when one doesn't already exist in this - * scope. - * @return a new provider which only delegates to the given unscoped provider - * when an instance of the requested object doesn't already exist in this - * scope - */ - Provider scope(Provider unscoped); - - /** - * A short but useful description of this scope. For comparison, the standard - * scopes that ship with guice use the descriptions - * {@code "Scopes.SINGLETON"}, {@code "ServletScopes.SESSION"} and - * {@code "ServletScopes.REQUEST"}. - */ - @Override - String toString(); -} diff --git a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java index d5b61407b497..5f05d0337654 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/Scopes.java @@ -19,8 +19,6 @@ package org.elasticsearch.injection.guice; import org.elasticsearch.injection.guice.internal.InternalFactory; import org.elasticsearch.injection.guice.internal.Scoping; -import java.util.Locale; - /** * Built-in scope implementations. * @@ -31,29 +29,27 @@ public class Scopes { private Scopes() {} /** - * One instance per {@link Injector}. + * Scopes an internal factory. */ - public static final Scope SINGLETON = new Scope() { - @Override - public Provider scope(final Provider creator) { - return new Provider() { + static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { + return switch (scoping) { + case UNSCOPED -> creator; + case EAGER_SINGLETON -> new InternalFactoryToProviderAdapter<>(Initializables.of(new Provider<>() { private volatile T instance; - // DCL on a volatile is safe as of Java 5, which we obviously require. @Override - @SuppressWarnings("DoubleCheckedLocking") public T get() { if (instance == null) { /* - * Use a pretty coarse lock. We don't want to run into deadlocks - * when two threads try to load circularly-dependent objects. - * Maybe one of these days we will identify independent graphs of - * objects and offer to load them in parallel. - */ - synchronized (InjectorImpl.class) { + * Use a pretty coarse lock. We don't want to run into deadlocks + * when two threads try to load circularly-dependent objects. + * Maybe one of these days we will identify independent graphs of + * objects and offer to load them in parallel. + */ + synchronized (injector) { if (instance == null) { - instance = creator.get(); + instance = new ProviderToInternalFactoryAdapter<>(injector, creator).get(); } } } @@ -62,54 +58,10 @@ public class Scopes { @Override public String toString() { - return String.format(Locale.ROOT, "%s[%s]", creator, SINGLETON); + return creator + "[SINGLETON]"; } - }; - } - - @Override - public String toString() { - return "Scopes.SINGLETON"; - } - }; - - /** - * No scope; the same as not applying any scope at all. Each time the - * Injector obtains an instance of an object with "no scope", it injects this - * instance then immediately forgets it. When the next request for the same - * binding arrives it will need to obtain the instance over again. - *

    - * This exists only in case a class has been annotated with a scope - * annotation and you need to override this to "no scope" in your binding. - * - * @since 2.0 - */ - public static final Scope NO_SCOPE = new Scope() { - @Override - public Provider scope(Provider unscoped) { - return unscoped; - } - - @Override - public String toString() { - return "Scopes.NO_SCOPE"; - } - }; - - /** - * Scopes an internal factory. - */ - static InternalFactory scope(InjectorImpl injector, InternalFactory creator, Scoping scoping) { - - if (scoping.isNoScope()) { - return creator; - } - - Scope scope = scoping.getScopeInstance(); - - // TODO: use diamond operator once JI-9019884 is fixed - Provider scoped = scope.scope(new ProviderToInternalFactoryAdapter(injector, creator)); - return new InternalFactoryToProviderAdapter<>(Initializables.of(scoped)); + })); + }; } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java index 28053c5f1d55..ee54c8aa9352 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/AbstractBindingBuilder.java @@ -77,7 +77,7 @@ public abstract class AbstractBindingBuilder { return; } - if (binding.getScoping().isExplicitlyScoped()) { + if (binding.getScoping() != Scoping.UNSCOPED) { binder.addError(SCOPE_ALREADY_SET); } } diff --git a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java index fcb03f34f420..e1c04ea8e348 100644 --- a/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java +++ b/server/src/main/java/org/elasticsearch/injection/guice/internal/Scoping.java @@ -16,8 +16,7 @@ package org.elasticsearch.injection.guice.internal; -import org.elasticsearch.injection.guice.Scope; -import org.elasticsearch.injection.guice.Scopes; +import org.elasticsearch.injection.guice.Injector; /** * References a scope, either directly (as a scope instance), or indirectly (as a scope annotation). @@ -25,69 +24,14 @@ import org.elasticsearch.injection.guice.Scopes; * * @author jessewilson@google.com (Jesse Wilson) */ -public abstract class Scoping { - +public enum Scoping { /** * No scoping annotation has been applied. Note that this is different from {@code * in(Scopes.NO_SCOPE)}, where the 'NO_SCOPE' has been explicitly applied. */ - public static final Scoping UNSCOPED = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.NO_SCOPE; - } - - @Override - public String toString() { - return Scopes.NO_SCOPE.toString(); - } - - }; - - public static final Scoping EAGER_SINGLETON = new Scoping() { - - @Override - public Scope getScopeInstance() { - return Scopes.SINGLETON; - } - - @Override - public String toString() { - return "eager singleton"; - } - - }; - + UNSCOPED, /** - * Returns true if this scope was explicitly applied. If no scope was explicitly applied then the - * scoping annotation will be used. + * One instance per {@link Injector}. */ - public boolean isExplicitlyScoped() { - return this != UNSCOPED; - } - - /** - * Returns true if this is the default scope. In this case a new instance will be provided for - * each injection. 
- */ - public boolean isNoScope() { - return getScopeInstance() == Scopes.NO_SCOPE; - } - - /** - * Returns true if this scope is a singleton that should be loaded eagerly in {@code stage}. - */ - public boolean isEagerSingleton() { - return this == EAGER_SINGLETON; - } - - /** - * Returns the scope instance, or {@code null} if that isn't known for this instance. - */ - public Scope getScopeInstance() { - return null; - } - - private Scoping() {} + EAGER_SINGLETON } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java index aadda93f977b..c7dc2c405ffb 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -122,7 +122,8 @@ public class PluginsLoader { private final List moduleDescriptors; private final List pluginDescriptors; private final Map loadedPluginLayers; - private final Set allBundles; + private final Set moduleBundles; + private final Set pluginBundles; /** * Constructs a new PluginsLoader @@ -153,37 +154,36 @@ public class PluginsLoader { Set seenBundles = new LinkedHashSet<>(); // load (elasticsearch) module layers - List moduleDescriptors; + final Set modules; if (modulesDirectory != null) { try { - Set modules = PluginsUtils.getModuleBundles(modulesDirectory); - moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + modules = PluginsUtils.getModuleBundles(modulesDirectory); seenBundles.addAll(modules); } catch (IOException ex) { throw new IllegalStateException("Unable to initialize modules", ex); } } else { - moduleDescriptors = Collections.emptyList(); + modules = Collections.emptySet(); } // load plugin layers - List pluginDescriptors; + final Set plugins; if (pluginsDirectory != null) { try { // TODO: remove this leniency, but tests bogusly rely on it if (isAccessibleDirectory(pluginsDirectory, logger)) { PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); - Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); - pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); + plugins = PluginsUtils.getPluginBundles(pluginsDirectory); + seenBundles.addAll(plugins); } else { - pluginDescriptors = Collections.emptyList(); + plugins = Collections.emptySet(); } } catch (IOException ex) { throw new IllegalStateException("Unable to initialize plugins", ex); } } else { - pluginDescriptors = Collections.emptyList(); + plugins = Collections.emptySet(); } Map loadedPluginLayers = new LinkedHashMap<>(); @@ -197,19 +197,15 @@ public class PluginsLoader { } } - return new PluginsLoader(moduleDescriptors, pluginDescriptors, loadedPluginLayers, Set.copyOf(seenBundles)); + return new PluginsLoader(modules, plugins, loadedPluginLayers); } - PluginsLoader( - List moduleDescriptors, - List pluginDescriptors, - Map loadedPluginLayers, - Set allBundles - ) { - this.moduleDescriptors = moduleDescriptors; - this.pluginDescriptors = pluginDescriptors; + PluginsLoader(Set modules, Set plugins, Map loadedPluginLayers) { + this.moduleBundles = modules; + this.pluginBundles = plugins; + this.moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + this.pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); this.loadedPluginLayers = loadedPluginLayers; - this.allBundles = allBundles; } public List moduleDescriptors() { @@ -224,8 +220,12 @@ public class PluginsLoader { return 
loadedPluginLayers.values().stream().map(Function.identity()); } - public Set allBundles() { - return allBundles; + public Set moduleBundles() { + return moduleBundles; + } + + public Set pluginBundles() { + return pluginBundles; } private static void loadPluginLayer( @@ -416,7 +416,7 @@ public class PluginsLoader { return result; } - static final String toPackageName(String className) { + static String toPackageName(String className) { assert className.endsWith(".") == false; int index = className.lastIndexOf('.'); if (index == -1) { @@ -426,11 +426,11 @@ public class PluginsLoader { } @SuppressForbidden(reason = "I need to convert URL's to Paths") - static final Path[] urlsToPaths(Set urls) { + static Path[] urlsToPaths(Set urls) { return urls.stream().map(PluginsLoader::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); } - static final URI uncheckedToURI(URL url) { + static URI uncheckedToURI(URL url) { try { return url.toURI(); } catch (URISyntaxException e) { diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index f5670ebd8a54..e87e9ee85b29 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -128,6 +129,14 @@ public interface SearchPlugin { return emptyList(); } + /** + * @return Applicable {@link QueryRewriteInterceptor}s configured for this plugin. + * Note: This is internal to Elasticsearch's API and not extensible by external plugins. + */ + default List getQueryRewriteInterceptors() { + return emptyList(); + } + /** * The new {@link Aggregation}s added by this plugin. */ diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java b/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java new file mode 100644 index 000000000000..8f4fb2ce7491 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/internal/rewriter/QueryRewriteInterceptor.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins.internal.rewriter; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; + +import java.util.Map; + +/** + * Enables modules and plugins to intercept and rewrite queries during the query rewrite phase on the coordinator node. 
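+ * Implementations are supplied through {@code SearchPlugin#getQueryRewriteInterceptors()} and are keyed
+ * by the query name they intercept; at most one interceptor may claim a given query name, and conflicting
+ * registrations fail node construction (see the IndicesServiceBuilder change above).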
+ */ +public interface QueryRewriteInterceptor { + + /** + * Intercepts and returns a rewritten query if modifications are required; otherwise, + * returns the same provided {@link QueryBuilder} instance unchanged. + * + * @param context the {@link QueryRewriteContext} providing the context for the rewrite operation + * @param queryBuilder the original {@link QueryBuilder} to potentially rewrite + * @return the rewritten {@link QueryBuilder}, or the original instance if no rewrite was needed + */ + QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder); + + /** + * Name of the query to be intercepted and rewritten. + */ + String getQueryName(); + + static QueryRewriteInterceptor multi(Map interceptors) { + return interceptors.isEmpty() ? new NoOpQueryRewriteInterceptor() : new CompositeQueryRewriteInterceptor(interceptors); + } + + class CompositeQueryRewriteInterceptor implements QueryRewriteInterceptor { + final String NAME = "composite"; + private final Map interceptors; + + private CompositeQueryRewriteInterceptor(Map interceptors) { + this.interceptors = interceptors; + } + + @Override + public String getQueryName() { + return NAME; + } + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + QueryRewriteInterceptor interceptor = interceptors.get(queryBuilder.getName()); + if (interceptor != null) { + return interceptor.interceptAndRewrite(context, queryBuilder); + } + return queryBuilder; + } + } + + class NoOpQueryRewriteInterceptor implements QueryRewriteInterceptor { + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + return queryBuilder; + } + + @Override + public String getQueryName() { + return null; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index e767bf5e3723..b236b1fa730f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -102,7 +102,9 @@ public final class RepositoriesModule { } if (preRestoreChecks.isEmpty()) { preRestoreChecks.add((snapshot, version) -> { - if (version.isLegacyIndexVersion()) { + // pre-restore checks will be run against the version in which the snapshot was created as well as + // the version in which the restored index was created + if (version.before(IndexVersions.MINIMUM_COMPATIBLE)) { throw new SnapshotRestoreException( snapshot, "the snapshot was created with Elasticsearch version [" diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java index e30d2f8d5c73..ba6fed3ea35d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestCreateIndexAction.java @@ -78,10 +78,7 @@ public class RestCreateIndexAction extends BaseRestHandler { Map mappings = (Map) source.get("mappings"); if (MapperService.isMappingSourceTyped(MapperService.SINGLE_MAPPING_NAME, mappings)) { throw new IllegalArgumentException( - "The mapping definition cannot be nested under a type " - + "[" - + MapperService.SINGLE_MAPPING_NAME - + "] unless include_type_name is set to true." 
+ "The mapping definition cannot be nested under a type [" + MapperService.SINGLE_MAPPING_NAME + "]." ); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index 5f648ca8e77e..0c7772bc3a69 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -9,15 +9,12 @@ package org.elasticsearch.rest.action.admin.indices; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -35,13 +32,6 @@ import static org.elasticsearch.rest.RestStatus.OK; public class RestGetFieldMappingAction extends BaseRestHandler { - private static final Logger logger = LogManager.getLogger(RestGetFieldMappingAction.class); - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(logger.getName()); - public static final String INCLUDE_TYPE_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in get " - + "field mapping requests is deprecated. The parameter will be removed in the next major version."; - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get field mapping request is deprecated. " - + "Use typeless api instead"; - @Override public List routes() { return List.of(new Route(GET, "/_mapping/field/{fields}"), new Route(GET, "/{index}/_mapping/field/{fields}")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java index 9ca890eaff65..9be3462e97e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetIndicesAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -35,9 +34,6 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; */ @ServerlessScope(Scope.PUBLIC) public class RestGetIndicesAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetIndicesAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using `include_type_name` in get indices requests" - + " is deprecated. 
The parameter will be removed in the next major version."; @Override public List routes() { diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 242bcd399413..27620fa750ea 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.TimeValue; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.rest.BaseRestHandler; @@ -31,11 +30,6 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @ServerlessScope(Scope.PUBLIC) public class RestGetMappingAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetMappingAction.class); - public static final String INCLUDE_TYPE_DEPRECATION_MSG = "[types removal] Using include_type_name in get" - + " mapping requests is deprecated. The parameter will be removed in the next major version."; - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get mapping request is deprecated. " - + "Use typeless api instead"; public RestGetMappingAction() {} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java index ee1710f39ce4..5fd4ec83c1a1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestResizeHandler.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; @@ -27,7 +26,6 @@ import static org.elasticsearch.rest.RestUtils.getAckTimeout; import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; public abstract class RestResizeHandler extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestResizeHandler.class); RestResizeHandler() {} diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java index 776302296b1a..39d7b1d5851d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestRolloverIndexAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import 
org.elasticsearch.cluster.metadata.DataStream; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -33,10 +32,6 @@ import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout; @ServerlessScope(Scope.PUBLIC) public class RestRolloverIndexAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestRolloverIndexAction.class); - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in rollover " - + "index requests is deprecated. The parameter will be removed in the next major version."; - @Override public List routes() { return List.of(new Route(POST, "/{index}/_rollover"), new Route(POST, "/{index}/_rollover/{new_index}")); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java index 7a579cbb98bc..ae078479ba85 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java @@ -18,8 +18,6 @@ import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Arrays; -import java.util.List; import java.util.Objects; /** @@ -49,25 +47,14 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable } public RemoteConnectionInfo(StreamInput input) throws IOException { - if (input.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - RemoteConnectionStrategy.ConnectionStrategy mode = input.readEnum(RemoteConnectionStrategy.ConnectionStrategy.class); - modeInfo = mode.getReader().read(input); - initialConnectionTimeout = input.readTimeValue(); - clusterAlias = input.readString(); - skipUnavailable = input.readBoolean(); - if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { - hasClusterCredentials = input.readBoolean(); - } else { - hasClusterCredentials = false; - } + RemoteConnectionStrategy.ConnectionStrategy mode = input.readEnum(RemoteConnectionStrategy.ConnectionStrategy.class); + modeInfo = mode.getReader().read(input); + initialConnectionTimeout = input.readTimeValue(); + clusterAlias = input.readString(); + skipUnavailable = input.readBoolean(); + if (input.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { + hasClusterCredentials = input.readBoolean(); } else { - List seedNodes = Arrays.asList(input.readStringArray()); - int connectionsPerCluster = input.readVInt(); - initialConnectionTimeout = input.readTimeValue(); - int numNodesConnected = input.readVInt(); - clusterAlias = input.readString(); - skipUnavailable = input.readBoolean(); - modeInfo = new SniffConnectionStrategy.SniffModeInfo(seedNodes, connectionsPerCluster, numNodesConnected); hasClusterCredentials = false; } } @@ -90,24 +77,9 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_6_0)) { - out.writeEnum(modeInfo.modeType()); - modeInfo.writeTo(out); - out.writeTimeValue(initialConnectionTimeout); - } else { - if (modeInfo.modeType() == RemoteConnectionStrategy.ConnectionStrategy.SNIFF) { - SniffConnectionStrategy.SniffModeInfo sniffInfo = 
(SniffConnectionStrategy.SniffModeInfo) this.modeInfo; - out.writeStringCollection(sniffInfo.seedNodes); - out.writeVInt(sniffInfo.maxConnectionsPerCluster); - out.writeTimeValue(initialConnectionTimeout); - out.writeVInt(sniffInfo.numNodesConnected); - } else { - out.writeStringArray(new String[0]); - out.writeVInt(0); - out.writeTimeValue(initialConnectionTimeout); - out.writeVInt(0); - } - } + out.writeEnum(modeInfo.modeType()); + modeInfo.writeTo(out); + out.writeTimeValue(initialConnectionTimeout); out.writeString(clusterAlias); out.writeBoolean(skipUnavailable); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 7640c1e7af30..550f8e584362 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -12,24 +12,16 @@ package org.elasticsearch.action.admin.indices.mapping.get; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse.FieldMappingMetadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.hasKey; - public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCase { public void testManualSerialization() throws IOException { @@ -56,36 +48,6 @@ public class GetFieldMappingsResponseTests extends AbstractWireSerializingTestCa assertEquals("{\"index\":{\"mappings\":{}}}", Strings.toString(response)); } - public void testToXContentIncludesType() throws Exception { - Map> mappings = new HashMap<>(); - FieldMappingMetadata fieldMappingMetadata = new FieldMappingMetadata("my field", new BytesArray("{}")); - mappings.put("index", Collections.singletonMap("field", fieldMappingMetadata)); - GetFieldMappingsResponse response = new GetFieldMappingsResponse(mappings); - ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("include_type_name", "true")); - - // v8 does not have _doc, even when include_type_name is present - // (although this throws unconsumed parameter exception in RestGetFieldMappingsAction) - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, RestApiVersion.V_8)) { - response.toXContent(builder, params); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map> index = (Map>) parser.map().get("index"); - assertThat(index.get("mappings"), hasKey("field")); - } - } - - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent, 
RestApiVersion.V_8)) { - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - @SuppressWarnings("unchecked") - Map> index = (Map>) parser.map().get("index"); - assertThat(index.get("mappings"), hasKey("field")); - } - } - } - @Override protected GetFieldMappingsResponse createTestInstance() { return new GetFieldMappingsResponse(randomMapping()); diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java index dc00f35da822..d81692dd29b5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java @@ -1758,7 +1758,9 @@ public class TransportSearchActionTests extends ESTestCase { NodeClient client = new NodeClient(settings, threadPool); SearchService searchService = mock(SearchService.class); - when(searchService.getRewriteContext(any(), any(), any())).thenReturn(new QueryRewriteContext(null, null, null, null, null)); + when(searchService.getRewriteContext(any(), any(), any())).thenReturn( + new QueryRewriteContext(null, null, null, null, null, null) + ); ClusterService clusterService = new ClusterService( settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index 8036a964071d..4abd0c4a9d46 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -690,7 +690,12 @@ public class IndexMetadataTests extends ESTestCase { } private static InferenceFieldMetadata randomInferenceFieldMetadata(String name) { - return new InferenceFieldMetadata(name, randomIdentifier(), randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)); + return new InferenceFieldMetadata( + name, + randomIdentifier(), + randomIdentifier(), + randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new) + ); } private IndexMetadataStats randomIndexStats(int numberOfShards) { diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index 3264cf168b63..5101064f293e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -435,7 +435,6 @@ public class DiscoveryNodesTests extends ESTestCase { assertEquals(Version.fromString("5.0.17"), build.getMaxNodeVersion()); assertEquals(Version.fromString("1.6.0"), build.getMinNodeVersion()); - assertEquals(Version.fromString("2.1.0"), build.getSmallestNonClientNodeVersion()); // doesn't include 1.6.0 observer assertEquals(IndexVersion.fromId(2010099), build.getMaxDataNodeCompatibleIndexVersion()); // doesn't include 2000199 observer assertEquals(IndexVersion.fromId(2000099), build.getMinSupportedIndexVersion()); // also includes observers } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java b/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java index 4bea6f50c7c4..b982bd7b95aa 
100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java @@ -11,6 +11,7 @@ package org.elasticsearch.common.lucene.store; import org.apache.lucene.store.ByteBuffersDirectory; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.FilterIndexInput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; @@ -267,17 +268,47 @@ public class InputStreamIndexInputTests extends ESTestCase { skipBytesExpected ); - IndexInput input = dir.openInput("test", IOContext.DEFAULT); - InputStreamIndexInput is = new InputStreamIndexInput(input, limit); + var countingInput = new CountingReadBytesIndexInput("test", dir.openInput("test", IOContext.DEFAULT)); + InputStreamIndexInput is = new InputStreamIndexInput(countingInput, limit); is.readNBytes(initialReadBytes); assertThat(is.skip(skipBytes), equalTo((long) skipBytesExpected)); + long expectedActualInitialBytesRead = Math.min(Math.min(initialReadBytes, limit), bytes); + assertThat(countingInput.getBytesRead(), equalTo(expectedActualInitialBytesRead)); int remainingBytes = Math.min(bytes, limit) - seekExpected; for (int i = seekExpected; i < seekExpected + remainingBytes; i++) { assertThat(is.read(), equalTo(i)); } + assertThat(countingInput.getBytesRead(), equalTo(expectedActualInitialBytesRead + remainingBytes)); } + protected static class CountingReadBytesIndexInput extends FilterIndexInput { + private long bytesRead = 0; + + public CountingReadBytesIndexInput(String resourceDescription, IndexInput in) { + super(resourceDescription, in); + } + + @Override + public byte readByte() throws IOException { + long filePointerBefore = getFilePointer(); + byte b = super.readByte(); + bytesRead += getFilePointer() - filePointerBefore; + return b; + } + + @Override + public void readBytes(byte[] b, int offset, int len) throws IOException { + long filePointerBefore = getFilePointer(); + super.readBytes(b, offset, len); + bytesRead += getFilePointer() - filePointerBefore; + } + + public long getBytesRead() { + return bytesRead; + } + }; + public void testReadZeroShouldReturnZero() throws IOException { try (Directory dir = new ByteBuffersDirectory()) { try (IndexOutput output = dir.createOutput("test", IOContext.DEFAULT)) { diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 49a4d519c0ea..c519d4834148 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -86,6 +86,7 @@ import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedInd import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.IndexStorePlugin; +import org.elasticsearch.plugins.internal.rewriter.MockQueryRewriteInterceptor; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.test.ClusterServiceUtils; @@ -223,7 +224,8 @@ public class IndexModuleTests extends ESTestCase { module.indexSettings().getMode().idFieldMapperWithoutFieldData(), null, indexDeletionListener, - emptyMap() + emptyMap(), + new MockQueryRewriteInterceptor() ); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java index 809fb161fcbe..b1470c1ee5b3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupInferenceFieldMapperTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; -import java.io.IOException; import java.util.Collection; import java.util.List; import java.util.Map; @@ -94,6 +93,7 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n)); public static final String INFERENCE_ID = "test_inference_id"; + public static final String SEARCH_INFERENCE_ID = "test_search_inference_id"; public static final String CONTENT_TYPE = "test_inference_field"; TestInferenceFieldMapper(String simpleName) { @@ -102,7 +102,7 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas @Override public InferenceFieldMetadata getMetadata(Set sourcePaths) { - return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, sourcePaths.toArray(new String[0])); + return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, SEARCH_INFERENCE_ID, sourcePaths.toArray(new String[0])); } @Override @@ -111,7 +111,7 @@ public class MappingLookupInferenceFieldMapperTests extends MapperServiceTestCas } @Override - protected void parseCreateField(DocumentParserContext context) throws IOException {} + protected void parseCreateField(DocumentParserContext context) {} @Override public Builder getMergeBuilder() { diff --git a/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java b/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java new file mode 100644 index 000000000000..6c570e0e7172 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/query/InterceptedQueryBuilderWrapperTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.query; + +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; + +public class InterceptedQueryBuilderWrapperTests extends ESTestCase { + + private TestThreadPool threadPool; + private NoOpClient client; + + @Before + public void setup() { + threadPool = createThreadPool(); + client = new NoOpClient(threadPool); + } + + @After + public void cleanup() { + threadPool.close(); + } + + public void testQueryNameReturnsWrappedQueryBuilder() { + MatchAllQueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); + InterceptedQueryBuilderWrapper interceptedQueryBuilderWrapper = new InterceptedQueryBuilderWrapper(matchAllQueryBuilder); + String queryName = randomAlphaOfLengthBetween(5, 10); + QueryBuilder namedQuery = interceptedQueryBuilderWrapper.queryName(queryName); + assertTrue(namedQuery instanceof InterceptedQueryBuilderWrapper); + assertEquals(queryName, namedQuery.queryName()); + } + + public void testQueryBoostReturnsWrappedQueryBuilder() { + MatchAllQueryBuilder matchAllQueryBuilder = new MatchAllQueryBuilder(); + InterceptedQueryBuilderWrapper interceptedQueryBuilderWrapper = new InterceptedQueryBuilderWrapper(matchAllQueryBuilder); + float boost = randomFloat(); + QueryBuilder boostedQuery = interceptedQueryBuilderWrapper.boost(boost); + assertTrue(boostedQuery instanceof InterceptedQueryBuilderWrapper); + assertEquals(boost, boostedQuery.boost(), 0.0001f); + } + + public void testRewrite() throws IOException { + QueryRewriteContext context = new QueryRewriteContext(null, client, null); + context.setQueryRewriteInterceptor(myMatchInterceptor); + + // Queries that are not intercepted behave normally + TermQueryBuilder termQueryBuilder = new TermQueryBuilder("field", "value"); + QueryBuilder rewritten = termQueryBuilder.rewrite(context); + assertTrue(rewritten instanceof TermQueryBuilder); + + // Queries that should be intercepted are intercepted, and the expected rewrite happens + MatchQueryBuilder matchQueryBuilder = new MatchQueryBuilder("field", "value"); + rewritten = matchQueryBuilder.rewrite(context); + assertTrue(rewritten instanceof InterceptedQueryBuilderWrapper); + assertTrue(((InterceptedQueryBuilderWrapper) rewritten).queryBuilder instanceof MatchQueryBuilder); + MatchQueryBuilder rewrittenMatchQueryBuilder = (MatchQueryBuilder) ((InterceptedQueryBuilderWrapper) rewritten).queryBuilder; + assertEquals("intercepted", rewrittenMatchQueryBuilder.value()); + + // An additional rewrite on an already intercepted query returns the same query + QueryBuilder rewrittenAgain = rewritten.rewrite(context); + assertTrue(rewrittenAgain instanceof InterceptedQueryBuilderWrapper); + assertEquals(rewritten, rewrittenAgain); + } + + private final QueryRewriteInterceptor myMatchInterceptor = new QueryRewriteInterceptor() { + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + if (queryBuilder instanceof MatchQueryBuilder matchQueryBuilder) { + return new MatchQueryBuilder(matchQueryBuilder.fieldName(), "intercepted"); + } + return queryBuilder; + } + + @Override + public String getQueryName() { + return MatchQueryBuilder.NAME; + } + }; +} diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java
b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java index d07bcf54fdf0..5dd231ab9788 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryRewriteContextTests.java @@ -52,6 +52,7 @@ public class QueryRewriteContextTests extends ESTestCase { null, null, null, + null, null ); @@ -79,6 +80,7 @@ public class QueryRewriteContextTests extends ESTestCase { null, null, null, + null, null ); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java index 059cb15551ac..b7d63b7d612c 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -9,12 +9,45 @@ package org.elasticsearch.plugins; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.plugin.analysis.CharFilterFactory; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.PrivilegedOperations; +import org.elasticsearch.test.compiler.InMemoryJavaCompiler; +import org.elasticsearch.test.jar.JarUtils; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.net.URLClassLoader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; + +import static java.util.Map.entry; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +@ESTestCase.WithoutSecurityManager +@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS") public class PluginsLoaderTests extends ESTestCase { + private static final Logger logger = LogManager.getLogger(PluginsLoaderTests.class); + + static PluginsLoader newPluginsLoader(Settings settings) { + return PluginsLoader.createPluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile(), false); + } + public void testToModuleName() { assertThat(PluginsLoader.toModuleName("module.name"), equalTo("module.name")); assertThat(PluginsLoader.toModuleName("module-name"), equalTo("module.name")); @@ -28,4 +61,220 @@ public class PluginsLoaderTests extends ESTestCase { assertThat(PluginsLoader.toModuleName("_module_name"), equalTo("_module_name")); assertThat(PluginsLoader.toModuleName("_"), equalTo("_")); } + + public void testStablePluginLoading() throws Exception { + final Path home = createTempDir(); + final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + final Path plugins = home.resolve("plugins"); + final Path plugin = plugins.resolve("stable-plugin"); + Files.createDirectories(plugin); + PluginTestUtil.writeStablePluginProperties( + plugin, + "description", + "description", + "name", + "stable-plugin", + "version", + "1.0.0", + "elasticsearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version") + ); + + Path jar = plugin.resolve("impl.jar"); + 
JarUtils.createJarWithEntries(jar, Map.of("p/A.class", InMemoryJavaCompiler.compile("p.A", """ + package p; + import java.util.Map; + import org.elasticsearch.plugin.analysis.CharFilterFactory; + import org.elasticsearch.plugin.NamedComponent; + import java.io.Reader; + @NamedComponent( "a_name") + public class A implements CharFilterFactory { + @Override + public Reader create(Reader reader) { + return reader; + } + } + """))); + Path namedComponentFile = plugin.resolve("named_components.json"); + Files.writeString(namedComponentFile, """ + { + "org.elasticsearch.plugin.analysis.CharFilterFactory": { + "a_name": "p.A" + } + } + """); + + var pluginsLoader = newPluginsLoader(settings); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo("stable-plugin")); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(true)); + + assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); + assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo("stable-plugin")); + assertThat(pluginsLoader.pluginDescriptors().get(0).isStable(), is(true)); + + var pluginClassLoader = loadedLayers.get(0).pluginClassLoader(); + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginClassLoader, instanceOf(UberModuleClassLoader.class)); + assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); + assertThat(pluginModuleLayer.modules(), contains(transformedMatch(Module::getName, equalTo("synthetic.stable.plugin")))); + + if (CharFilterFactory.class.getModule().isNamed() == false) { + // test frameworks run with stable api classes on classpath, so we + // have no choice but to let our class read the unnamed module that + // owns the stable api classes + ((UberModuleClassLoader) pluginClassLoader).addReadsSystemClassLoaderUnnamedModule(); + } + + Class stableClass = pluginClassLoader.loadClass("p.A"); + assertThat(stableClass.getModule().getName(), equalTo("synthetic.stable.plugin")); + } finally { + closePluginLoaders(pluginsLoader); + } + } + + public void testModularPluginLoading() throws Exception { + final Path home = createTempDir(); + final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + final Path plugins = home.resolve("plugins"); + final Path plugin = plugins.resolve("modular-plugin"); + Files.createDirectories(plugin); + PluginTestUtil.writePluginProperties( + plugin, + "description", + "description", + "name", + "modular-plugin", + "classname", + "p.A", + "modulename", + "modular.plugin", + "version", + "1.0.0", + "elasticsearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version") + ); + + Path jar = plugin.resolve("impl.jar"); + Map sources = Map.ofEntries(entry("module-info", "module modular.plugin { exports p; }"), entry("p.A", """ + package p; + import org.elasticsearch.plugins.Plugin; + + public class A extends Plugin { + } + """)); + + // Usually org.elasticsearch.plugins.Plugin would be in the org.elasticsearch.server module. + // Unfortunately, as tests run non-modular, it will be in the unnamed module, so we need to add a read for it. 
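// Illustrative aside (an editor's sketch, not part of this change): once the plugin layer below is
// loaded, the read edge created by "--add-reads modular.plugin=ALL-UNNAMED" could be verified via the
// java.lang.Module API; here pluginModuleLayer stands for the loaded layer asserted on further down.
Module pluginModule = pluginModuleLayer.findModule("modular.plugin").orElseThrow();
Module unnamedModule = ClassLoader.getSystemClassLoader().getUnnamedModule();
assert pluginModule.canRead(unnamedModule); // the compiled module can now see Plugin in the unnamed module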
+ var classToBytes = InMemoryJavaCompiler.compile(sources, "--add-reads", "modular.plugin=ALL-UNNAMED"); + + JarUtils.createJarWithEntries( + jar, + Map.ofEntries(entry("module-info.class", classToBytes.get("module-info")), entry("p/A.class", classToBytes.get("p.A"))) + ); + + var pluginsLoader = newPluginsLoader(settings); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo("modular-plugin")); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(false)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isModular(), is(true)); + + assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); + assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo("modular-plugin")); + assertThat(pluginsLoader.pluginDescriptors().get(0).isModular(), is(true)); + + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginModuleLayer, is(not(ModuleLayer.boot()))); + assertThat(pluginModuleLayer.modules(), contains(transformedMatch(Module::getName, equalTo("modular.plugin")))); + } finally { + closePluginLoaders(pluginsLoader); + } + } + + public void testNonModularPluginLoading() throws Exception { + final Path home = createTempDir(); + final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), home).build(); + final Path plugins = home.resolve("plugins"); + final Path plugin = plugins.resolve("non-modular-plugin"); + Files.createDirectories(plugin); + PluginTestUtil.writePluginProperties( + plugin, + "description", + "description", + "name", + "non-modular-plugin", + "classname", + "p.A", + "version", + "1.0.0", + "elasticsearch.version", + Version.CURRENT.toString(), + "java.version", + System.getProperty("java.specification.version") + ); + + Path jar = plugin.resolve("impl.jar"); + Map sources = Map.ofEntries(entry("p.A", """ + package p; + import org.elasticsearch.plugins.Plugin; + + public class A extends Plugin { + } + """)); + + var classToBytes = InMemoryJavaCompiler.compile(sources); + + JarUtils.createJarWithEntries(jar, Map.ofEntries(entry("p/A.class", classToBytes.get("p.A")))); + + var pluginsLoader = newPluginsLoader(settings); + try { + var loadedLayers = pluginsLoader.pluginLayers().toList(); + + assertThat(loadedLayers, hasSize(1)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().getName(), equalTo("non-modular-plugin")); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isStable(), is(false)); + assertThat(loadedLayers.get(0).pluginBundle().pluginDescriptor().isModular(), is(false)); + + assertThat(pluginsLoader.pluginDescriptors(), hasSize(1)); + assertThat(pluginsLoader.pluginDescriptors().get(0).getName(), equalTo("non-modular-plugin")); + assertThat(pluginsLoader.pluginDescriptors().get(0).isModular(), is(false)); + + var pluginModuleLayer = loadedLayers.get(0).pluginModuleLayer(); + assertThat(pluginModuleLayer, is(ModuleLayer.boot())); + } finally { + closePluginLoaders(pluginsLoader); + } + } + + // Closes the URLClassLoaders and UberModuleClassloaders created by the given plugin loader. + // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. 
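// Illustrative aside (a sketch assuming a JDK with pattern matching for switch, e.g. Java 21; not
// part of this change): the instanceof chain in the helper below could equally be written as a
// pattern switch over the plugin's ClassLoader, at the cost of coarser exception handling:
try {
    switch (classLoader) { // classLoader: the value of lp.pluginClassLoader() in the helper below
        case URLClassLoader url -> PrivilegedOperations.closeURLClassLoader(url);
        case UberModuleClassLoader uber -> PrivilegedOperations.closeURLClassLoader(uber.getInternalLoader());
        default -> logger.info("Cannot close unexpected classloader " + classLoader);
    }
} catch (Exception e) {
    throw new RuntimeException(e);
}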
+ static void closePluginLoaders(PluginsLoader pluginsLoader) { + pluginsLoader.pluginLayers().forEach(lp -> { + if (lp.pluginClassLoader() instanceof URLClassLoader urlClassLoader) { + try { + PrivilegedOperations.closeURLClassLoader(urlClassLoader); + } catch (IOException unexpected) { + throw new UncheckedIOException(unexpected); + } + } else if (lp.pluginClassLoader() instanceof UberModuleClassLoader loader) { + try { + PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } else { + logger.info("Cannot close unexpected classloader " + lp.pluginClassLoader()); + } + }); + } } diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java index c63c65a750d7..e232d10fdddb 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpFixture.java @@ -8,38 +8,31 @@ */ package fixture.aws.imds; -import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.SuppressForbidden; import org.junit.rules.ExternalResource; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.Objects; -import java.util.Set; -import java.util.function.BiConsumer; public class Ec2ImdsHttpFixture extends ExternalResource { + public static final String ENDPOINT_OVERRIDE_SYSPROP_NAME = "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride"; + + private final Ec2ImdsServiceBuilder ec2ImdsServiceBuilder; private HttpServer server; - private final Ec2ImdsVersion ec2ImdsVersion; - private final BiConsumer newCredentialsConsumer; - private final Set alternativeCredentialsEndpoints; - - public Ec2ImdsHttpFixture( - Ec2ImdsVersion ec2ImdsVersion, - BiConsumer newCredentialsConsumer, - Set alternativeCredentialsEndpoints - ) { - this.ec2ImdsVersion = Objects.requireNonNull(ec2ImdsVersion); - this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); - this.alternativeCredentialsEndpoints = Objects.requireNonNull(alternativeCredentialsEndpoints); - } - - protected HttpHandler createHandler() { - return new Ec2ImdsHttpHandler(ec2ImdsVersion, newCredentialsConsumer, alternativeCredentialsEndpoints); + public Ec2ImdsHttpFixture(Ec2ImdsServiceBuilder ec2ImdsServiceBuilder) { + this.ec2ImdsServiceBuilder = ec2ImdsServiceBuilder; } public String getAddress() { @@ -52,7 +45,7 @@ public class Ec2ImdsHttpFixture extends ExternalResource { protected void before() throws Throwable { server = HttpServer.create(resolveAddress(), 0); - server.createContext("/", Objects.requireNonNull(createHandler())); + server.createContext("/", Objects.requireNonNull(ec2ImdsServiceBuilder.buildHandler())); server.start(); } @@ -68,4 +61,32 @@ public class Ec2ImdsHttpFixture extends ExternalResource { throw new RuntimeException(e); } } + + @SuppressForbidden(reason = "deliberately adjusting system property for endpoint override for use in internal-cluster tests") + public static Releasable withEc2MetadataServiceEndpointOverride(String endpointOverride) { + 
final PrivilegedAction resetProperty = System.getProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME) instanceof String originalValue + ? () -> System.setProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME, originalValue) + : () -> System.clearProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME); + doPrivileged(() -> System.setProperty(ENDPOINT_OVERRIDE_SYSPROP_NAME, endpointOverride)); + return () -> doPrivileged(resetProperty); + } + + private static void doPrivileged(PrivilegedAction privilegedAction) { + AccessController.doPrivileged(privilegedAction); + } + + public static void runWithFixture(Ec2ImdsServiceBuilder ec2ImdsServiceBuilder, CheckedConsumer action) { + final var imdsFixture = new Ec2ImdsHttpFixture(ec2ImdsServiceBuilder); + try { + imdsFixture.apply(new Statement() { + @Override + public void evaluate() throws Exception { + action.accept(imdsFixture); + } + }, Description.EMPTY).evaluate(); + } catch (Throwable e) { + throw new AssertionError(e); + } + } + } diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java index 281465b96de0..0c58205ac8d6 100644 --- a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsHttpHandler.java @@ -23,9 +23,11 @@ import java.time.Clock; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Collection; +import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.BiConsumer; +import java.util.function.Supplier; import static org.elasticsearch.test.ESTestCase.randomIdentifier; import static org.elasticsearch.test.ESTestCase.randomSecretKey; @@ -42,16 +44,22 @@ public class Ec2ImdsHttpHandler implements HttpHandler { private final Set validImdsTokens = ConcurrentCollections.newConcurrentSet(); private final BiConsumer newCredentialsConsumer; + private final Map instanceAddresses; private final Set validCredentialsEndpoints = ConcurrentCollections.newConcurrentSet(); + private final Supplier availabilityZoneSupplier; public Ec2ImdsHttpHandler( Ec2ImdsVersion ec2ImdsVersion, BiConsumer newCredentialsConsumer, - Collection alternativeCredentialsEndpoints + Collection alternativeCredentialsEndpoints, + Supplier availabilityZoneSupplier, + Map instanceAddresses ) { this.ec2ImdsVersion = Objects.requireNonNull(ec2ImdsVersion); this.newCredentialsConsumer = Objects.requireNonNull(newCredentialsConsumer); + this.instanceAddresses = instanceAddresses; this.validCredentialsEndpoints.addAll(alternativeCredentialsEndpoints); + this.availabilityZoneSupplier = availabilityZoneSupplier; } @Override @@ -93,10 +101,11 @@ public class Ec2ImdsHttpHandler implements HttpHandler { if (path.equals(IMDS_SECURITY_CREDENTIALS_PATH)) { final var profileName = randomIdentifier(); validCredentialsEndpoints.add(IMDS_SECURITY_CREDENTIALS_PATH + profileName); - final byte[] response = profileName.getBytes(StandardCharsets.UTF_8); - exchange.getResponseHeaders().add("Content-Type", "text/plain"); - exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); - exchange.getResponseBody().write(response); + sendStringResponse(exchange, profileName); + return; + } else if (path.equals("/latest/meta-data/placement/availability-zone")) { + final var availabilityZone = availabilityZoneSupplier.get(); + sendStringResponse(exchange, availabilityZone); return; } else if 
(validCredentialsEndpoints.contains(path)) { final String accessKey = randomIdentifier(); @@ -121,10 +130,20 @@ public class Ec2ImdsHttpHandler implements HttpHandler { exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); exchange.getResponseBody().write(response); return; + } else if (instanceAddresses.get(path) instanceof String instanceAddress) { + sendStringResponse(exchange, instanceAddress); + return; } } ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError("not supported: " + requestMethod + " " + path)); } } + + private void sendStringResponse(HttpExchange exchange, String value) throws IOException { + final byte[] response = value.getBytes(StandardCharsets.UTF_8); + exchange.getResponseHeaders().add("Content-Type", "text/plain"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), response.length); + exchange.getResponseBody().write(response); + } } diff --git a/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java new file mode 100644 index 000000000000..505c9978bc4f --- /dev/null +++ b/test/fixtures/ec2-imds-fixture/src/main/java/fixture/aws/imds/Ec2ImdsServiceBuilder.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws.imds; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Supplier; + +public class Ec2ImdsServiceBuilder { + + private final Ec2ImdsVersion ec2ImdsVersion; + private BiConsumer newCredentialsConsumer = Ec2ImdsServiceBuilder::rejectNewCredentials; + private Collection alternativeCredentialsEndpoints = Set.of(); + private Supplier availabilityZoneSupplier = Ec2ImdsServiceBuilder::rejectAvailabilityZone; + private final Map instanceAddresses = new HashMap<>(); + + public Ec2ImdsServiceBuilder(Ec2ImdsVersion ec2ImdsVersion) { + this.ec2ImdsVersion = ec2ImdsVersion; + } + + public Ec2ImdsServiceBuilder newCredentialsConsumer(BiConsumer newCredentialsConsumer) { + this.newCredentialsConsumer = newCredentialsConsumer; + return this; + } + + private static void rejectNewCredentials(String ignored1, String ignored2) { + ESTestCase.fail("credentials creation not supported"); + } + + public Ec2ImdsServiceBuilder alternativeCredentialsEndpoints(Collection alternativeCredentialsEndpoints) { + this.alternativeCredentialsEndpoints = alternativeCredentialsEndpoints; + return this; + } + + private static String rejectAvailabilityZone() { + return ESTestCase.fail(null, "availability zones not supported"); + } + + public Ec2ImdsServiceBuilder availabilityZoneSupplier(Supplier availabilityZoneSupplier) { + this.availabilityZoneSupplier = availabilityZoneSupplier; + return this; + } + + public Ec2ImdsServiceBuilder addInstanceAddress(String addressType, String addressValue) { + instanceAddresses.put("/latest/meta-data/" + addressType, addressValue); + return this; + } + + public Ec2ImdsHttpHandler 
buildHandler() { + return new Ec2ImdsHttpHandler( + ec2ImdsVersion, + newCredentialsConsumer, + alternativeCredentialsEndpoints, + availabilityZoneSupplier, + Map.copyOf(instanceAddresses) + ); + } + +} diff --git a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java index bb613395a0fb..6d3eb3d14e9b 100644 --- a/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java +++ b/test/fixtures/ec2-imds-fixture/src/test/java/fixture/aws/imds/Ec2ImdsHttpHandlerTests.java @@ -30,6 +30,7 @@ import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.URI; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -43,7 +44,7 @@ public class Ec2ImdsHttpHandlerTests extends ESTestCase { public void testImdsV1() throws IOException { final Map generatedCredentials = new HashMap<>(); - final var handler = new Ec2ImdsHttpHandler(Ec2ImdsVersion.V1, generatedCredentials::put, Set.of()); + final var handler = new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(generatedCredentials::put).buildHandler(); final var roleResponse = handleRequest(handler, "GET", SECURITY_CREDENTIALS_URI); assertEquals(RestStatus.OK, roleResponse.status()); @@ -66,18 +67,14 @@ public class Ec2ImdsHttpHandlerTests extends ESTestCase { public void testImdsV2Disabled() { assertEquals( RestStatus.METHOD_NOT_ALLOWED, - handleRequest( - new Ec2ImdsHttpHandler(Ec2ImdsVersion.V1, (accessKey, sessionToken) -> fail(), Set.of()), - "PUT", - "/latest/api/token" - ).status() + handleRequest(new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).buildHandler(), "PUT", "/latest/api/token").status() ); } public void testImdsV2() throws IOException { final Map generatedCredentials = new HashMap<>(); - final var handler = new Ec2ImdsHttpHandler(Ec2ImdsVersion.V2, generatedCredentials::put, Set.of()); + final var handler = new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).newCredentialsConsumer(generatedCredentials::put).buildHandler(); final var tokenResponse = handleRequest(handler, "PUT", "/latest/api/token"); assertEquals(RestStatus.OK, tokenResponse.status()); @@ -101,6 +98,21 @@ public class Ec2ImdsHttpHandlerTests extends ESTestCase { assertEquals(sessionToken, responseMap.get("Token")); } + public void testAvailabilityZone() { + final Set generatedAvailabilityZones = new HashSet<>(); + final var handler = new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).availabilityZoneSupplier(() -> { + final var newAvailabilityZone = randomIdentifier(); + generatedAvailabilityZones.add(newAvailabilityZone); + return newAvailabilityZone; + }).buildHandler(); + + final var availabilityZoneResponse = handleRequest(handler, "GET", "/latest/meta-data/placement/availability-zone"); + assertEquals(RestStatus.OK, availabilityZoneResponse.status()); + final var availabilityZone = availabilityZoneResponse.body().utf8ToString(); + + assertEquals(generatedAvailabilityZones, Set.of(availabilityZone)); + } + private record TestHttpResponse(RestStatus status, BytesReference body) {} private static TestHttpResponse checkImdsV2GetRequest(Ec2ImdsHttpHandler handler, String uri, String token) { diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index 91875600ec00..0a4c99eb8b52 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -45,7 +45,7 @@ public class MockPluginsService extends PluginsService { super( settings, environment.configFile(), - new PluginsLoader(Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), Collections.emptySet()) + new PluginsLoader(Collections.emptySet(), Collections.emptySet(), Collections.emptyMap()) ); List pluginsLoaded = new ArrayList<>(); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java b/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java new file mode 100644 index 000000000000..196e5bd4f4a2 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/plugins/internal/rewriter/MockQueryRewriteInterceptor.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins.internal.rewriter; + +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; + +public class MockQueryRewriteInterceptor implements QueryRewriteInterceptor { + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + return queryBuilder; + } + + @Override + public String getQueryName() { + return this.getClass().getSimpleName(); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index bdf323afb8d9..20cb66affdde 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -71,6 +71,8 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.plugins.internal.rewriter.MockQueryRewriteInterceptor; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.script.MockScriptEngine; import org.elasticsearch.script.MockScriptService; @@ -629,7 +631,8 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { () -> true, scriptService, createMockResolvedIndices(), - null + null, + createMockQueryRewriteInterceptor() ); } @@ -670,5 +673,9 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { Map.of(index, indexMetadata) ); } + + private QueryRewriteInterceptor createMockQueryRewriteInterceptor() { + return new MockQueryRewriteInterceptor(); + } } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java index 
c75ed4610211..ea5a0adffa0e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregationBuilder.java @@ -154,6 +154,6 @@ public class StringStatsAggregationBuilder extends ValuesSourceAggregationBuilde @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_6_0; + return TransportVersions.ZERO; } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index d86c15aa14bc..558303f7e0f0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -104,7 +104,7 @@ public abstract class LicensedFeature { return needsActive; } - /** Create a momentary feature for hte given license level */ + /** Create a momentary feature for the given license level */ public static Momentary momentary(String family, String name, License.OperationMode licenseLevel) { return new Momentary(family, name, licenseLevel, true); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java index 8a7ead2caf3b..212e593c9db9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/TransportPostStartTrialAction.java @@ -55,7 +55,7 @@ public class TransportPostStartTrialAction extends TransportMasterNodeAction listener ) throws Exception { - if (state.nodes().getMaxNodeVersion().after(state.nodes().getSmallestNonClientNodeVersion())) { + if (state.nodes().isMixedVersionCluster()) { throw new IllegalStateException( "Please ensure all nodes are on the same version before starting your trial, the highest node version in this cluster is [" + state.nodes().getMaxNodeVersion() diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index 4f8a18e28aea..3c7b089b4cd6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -106,6 +106,7 @@ public class XPackLicenseState { messages.put(XPackField.CCR, XPackLicenseState::ccrAcknowledgementMessages); messages.put(XPackField.ENTERPRISE_SEARCH, XPackLicenseState::enterpriseSearchAcknowledgementMessages); messages.put(XPackField.REDACT_PROCESSOR, XPackLicenseState::redactProcessorAcknowledgementMessages); + messages.put(XPackField.ESQL, XPackLicenseState::esqlAcknowledgementMessages); ACKNOWLEDGMENT_MESSAGES = Collections.unmodifiableMap(messages); } @@ -243,6 +244,26 @@ public class XPackLicenseState { return Strings.EMPTY_ARRAY; } + private static String[] esqlAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { + /* + * Provide an acknowledgement warning to customers that downgrade from Trial or Enterprise to a lower + * license level (Basic, Standard, Gold or Platinum) that they will no longer be able to do CCS in ES|QL.
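 * For example (illustrative addition, not part of the patch): esqlAcknowledgementMessages(ENTERPRISE, GOLD)
 * and esqlAcknowledgementMessages(TRIAL, PLATINUM) both return the single message below, while
 * esqlAcknowledgementMessages(TRIAL, ENTERPRISE) and esqlAcknowledgementMessages(GOLD, BASIC) fall
 * through both switches and return no messages.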
+ */ + switch (newMode) { + case BASIC: + case STANDARD: + case GOLD: + case PLATINUM: + switch (currentMode) { + case TRIAL: + case ENTERPRISE: + return new String[] { "ES|QL cross-cluster search will be disabled." }; + } + break; + } + return Strings.EMPTY_ARRAY; + } + private static String[] machineLearningAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { switch (newMode) { case BASIC: diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index e7a887a386c6..1ea17f9b796c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -282,11 +282,7 @@ public class AutoFollowMetadata extends AbstractNamedDiffable require ) { if (include == null) { - this.include = Collections.emptyMap(); + this.include = Map.of(); } else { this.include = include; } if (exclude == null) { - this.exclude = Collections.emptyMap(); + this.exclude = Map.of(); } else { this.exclude = exclude; } if (require == null) { - this.require = Collections.emptyMap(); + this.require = Map.of(); } else { this.require = require; } @@ -201,7 +199,7 @@ public class AllocateAction implements LifecycleAction { } UpdateSettingsStep allocateStep = new UpdateSettingsStep(allocateKey, allocationRoutedKey, client, newSettings.build()); AllocationRoutedStep routedCheckStep = new AllocationRoutedStep(allocationRoutedKey, nextStepKey); - return Arrays.asList(allocateStep, routedCheckStep); + return List.of(allocateStep, routedCheckStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java index f586c3856693..6922ee4cef3e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStep.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; -import java.util.Collections; +import java.util.List; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.allShardsActiveAllocationInfo; import static org.elasticsearch.xpack.core.ilm.step.info.AllocationInfo.waitingForActiveShardsAllocationInfo; @@ -66,7 +66,7 @@ public class AllocationRoutedStep extends ClusterStateWaitStep { } AllocationDeciders allocationDeciders = new AllocationDeciders( - Collections.singletonList( + List.of( new FilterAllocationDecider( clusterState.getMetadata().settings(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java index d212492f14d0..8712cefac5d3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.time.Instant; -import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -99,7 +98,7 @@ 
public class DeleteAction implements LifecycleAction { ); CleanupSnapshotStep cleanupSnapshotStep = new CleanupSnapshotStep(cleanSnapshotKey, deleteStepKey, client); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); - return Arrays.asList(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, cleanupSnapshotStep, deleteStep); + return List.of(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, cleanupSnapshotStep, deleteStep); } else { WaitForNoFollowersStep waitForNoFollowersStep = new WaitForNoFollowersStep( waitForNoFollowerStepKey, @@ -113,7 +112,7 @@ public class DeleteAction implements LifecycleAction { client ); DeleteStep deleteStep = new DeleteStep(deleteStepKey, nextStepKey, client); - return Arrays.asList(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, deleteStep); + return List.of(waitForNoFollowersStep, waitUntilTimeSeriesEndTimeStep, deleteStep); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java index f3afe9e4d52c..741fff63f61f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ForceMergeStep.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.Strings; import java.util.Arrays; import java.util.Objects; -import java.util.stream.Collectors; /** * Invokes a force merge on a single index. @@ -67,10 +66,7 @@ public class ForceMergeStep extends AsyncActionStep { policyName, failures == null ? "n/a" - : Strings.collectionToDelimitedString( - Arrays.stream(failures).map(Strings::toString).collect(Collectors.toList()), - "," - ), + : Strings.collectionToDelimitedString(Arrays.stream(failures).map(Strings::toString).toList(), ","), NAME ); logger.warn(errorMessage); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java index e1e05859e58c..ab57613e4e72 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/FreezeAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; /** @@ -98,7 +97,7 @@ public class FreezeAction implements LifecycleAction { ); CheckNotDataStreamWriteIndexStep checkNoWriteIndexStep = new CheckNotDataStreamWriteIndexStep(checkNotWriteIndex, freezeStepKey); FreezeStep freezeStep = new FreezeStep(freezeStepKey, nextStepKey, client); - return Arrays.asList(conditionalSkipFreezeStep, checkNoWriteIndexStep, freezeStep); + return List.of(conditionalSkipFreezeStep, checkNoWriteIndexStep, freezeStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java index 68e850645157..a4086d7ce8bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtils.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.core.template.resources.TemplateResources; import java.io.IOException; import java.util.List; import 
java.util.Map; -import java.util.stream.Collectors; /** * A utility class used for index lifecycle policies @@ -120,7 +119,7 @@ public class LifecyclePolicyUtils { .stream() .filter(indexMetadata -> policyName.equals(indexMetadata.getLifecyclePolicyName())) .map(indexMetadata -> indexMetadata.getIndex().getName()) - .collect(Collectors.toList()); + .toList(); final List allDataStreams = indexNameExpressionResolver.dataStreamNames( project, @@ -135,12 +134,12 @@ public class LifecyclePolicyUtils { } else { return false; } - }).collect(Collectors.toList()); + }).toList(); final List composableTemplates = project.templatesV2().keySet().stream().filter(templateName -> { Settings settings = MetadataIndexTemplateService.resolveSettings(project, templateName); return policyName.equals(LifecycleSettings.LIFECYCLE_NAME_SETTING.get(settings)); - }).collect(Collectors.toList()); + }).toList(); return new ItemUsage(indices, dataStreams, composableTemplates); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java index 117abecafeab..2b03dc77eb5b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; import java.time.Instant; -import java.util.Arrays; import java.util.List; /** @@ -72,7 +71,7 @@ public class ReadOnlyAction implements LifecycleAction { client ); ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, nextStepKey, client); - return Arrays.asList(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); + return List.of(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java index 515941bce841..f3c72004d6cc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverAction.java @@ -22,7 +22,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -172,7 +171,7 @@ public class RolloverAction implements LifecycleAction { client, INDEXING_COMPLETE ); - return Arrays.asList(waitForRolloverReadyStep, rolloverStep, waitForActiveShardsStep, updateDateStep, setIndexingCompleteStep); + return List.of(waitForRolloverReadyStep, rolloverStep, waitForActiveShardsStep, updateDateStep, setIndexingCompleteStep); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java index ad8f450fb084..95ca049740c7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SegmentCountStep.java @@ -67,10 +67,7 @@ public class SegmentCountStep extends AsyncWaitStep { response.getFailedShards(), failures == null ? 
"n/a" - : Strings.collectionToDelimitedString( - Arrays.stream(failures).map(Strings::toString).collect(Collectors.toList()), - "," - ) + : Strings.collectionToDelimitedString(Arrays.stream(failures).map(Strings::toString).toList(), ",") ); listener.onResponse(true, new Info(-1)); } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java index 376567bc2004..5f7c1d0c3bf3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/SetPriorityAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -101,7 +100,7 @@ public class SetPriorityAction implements LifecycleAction { Settings indexPriority = recoveryPriority == null ? NULL_PRIORITY_SETTINGS : Settings.builder().put(IndexMetadata.INDEX_PRIORITY_SETTING.getKey(), recoveryPriority).build(); - return Collections.singletonList(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); + return List.of(new UpdateSettingsStep(key, nextStepKey, client, indexPriority)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java index 3a95b1398751..c39debfb0f98 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java @@ -31,7 +31,6 @@ import java.io.IOException; import java.time.Instant; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; @@ -329,7 +328,7 @@ public class ShrinkAction implements LifecycleAction { allowWriteAfterShrinkStep ); - return steps.filter(Objects::nonNull).collect(Collectors.toList()); + return steps.filter(Objects::nonNull).toList(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java index 48a0e65bddf2..0fd280f440f3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleType.java @@ -14,7 +14,6 @@ import org.elasticsearch.core.Tuple; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -30,8 +29,6 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.stream.Collectors.toList; - /** * Represents the lifecycle of an index from creation to deletion. A * {@link TimeseriesLifecycleType} is made up of a set of {@link Phase}s which it will @@ -114,7 +111,7 @@ public class TimeseriesLifecycleType implements LifecycleType { // Set of actions that cannot be defined (executed) after the managed index has been mounted as searchable snapshot. 
// It's ordered to produce consistent error messages which can be unit tested. public static final Set ACTIONS_CANNOT_FOLLOW_SEARCHABLE_SNAPSHOT = Collections.unmodifiableSet( - new LinkedHashSet<>(Arrays.asList(ForceMergeAction.NAME, FreezeAction.NAME, ShrinkAction.NAME, DownsampleAction.NAME)) + new LinkedHashSet<>(List.of(ForceMergeAction.NAME, FreezeAction.NAME, ShrinkAction.NAME, DownsampleAction.NAME)) ); private TimeseriesLifecycleType() {} @@ -180,11 +177,11 @@ public class TimeseriesLifecycleType implements LifecycleType { public List getOrderedActions(Phase phase) { Map actions = phase.getActions(); return switch (phase.getName()) { - case HOT_PHASE -> ORDERED_VALID_HOT_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case WARM_PHASE -> ORDERED_VALID_WARM_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case COLD_PHASE -> ORDERED_VALID_COLD_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case FROZEN_PHASE -> ORDERED_VALID_FROZEN_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); - case DELETE_PHASE -> ORDERED_VALID_DELETE_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).collect(toList()); + case HOT_PHASE -> ORDERED_VALID_HOT_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case WARM_PHASE -> ORDERED_VALID_WARM_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case COLD_PHASE -> ORDERED_VALID_COLD_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case FROZEN_PHASE -> ORDERED_VALID_FROZEN_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); + case DELETE_PHASE -> ORDERED_VALID_DELETE_ACTIONS.stream().map(actions::get).filter(Objects::nonNull).toList(); default -> throw new IllegalArgumentException("lifecycle type [" + TYPE + "] does not support phase [" + phase.getName() + "]"); }; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java index 31b9db3c2407..312551a2611f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UnfollowAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -72,7 +71,7 @@ public final class UnfollowAction implements LifecycleAction { UnfollowFollowerIndexStep step5 = new UnfollowFollowerIndexStep(unfollowFollowerIndex, openFollowerIndex, client); OpenIndexStep step6 = new OpenIndexStep(openFollowerIndex, waitForYellowStep, client); WaitForIndexColorStep step7 = new WaitForIndexColorStep(waitForYellowStep, nextStepKey, ClusterHealthStatus.YELLOW); - return Arrays.asList(conditionalSkipUnfollowStep, step1, step2, step3, step4, step5, step6, step7); + return List.of(conditionalSkipUnfollowStep, step1, step2, step3, step4, step5, step6, step7); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java index 86df01d3345a..90f36ef33121 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStep.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; @@ -78,7 +77,7 @@ final class WaitForFollowShardTasksStep extends AsyncWaitStep { status.followerGlobalCheckpoint() ) ) - .collect(Collectors.toList()); + .toList(); listener.onResponse(false, new Info(shardFollowTaskInfos)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java index 08a884f0b8f3..2633656d7c30 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForSnapshotAction.java @@ -17,7 +17,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Objects; @@ -62,7 +61,7 @@ public class WaitForSnapshotAction implements LifecycleAction { @Override public List toSteps(Client client, String phase, StepKey nextStepKey) { StepKey waitForSnapshotKey = new StepKey(phase, NAME, WaitForSnapshotStep.NAME); - return Collections.singletonList(new WaitForSnapshotStep(waitForSnapshotKey, nextStepKey, client, policy)); + return List.of(new WaitForSnapshotStep(waitForSnapshotKey, nextStepKey, client, policy)); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java index 726797d2e563..f44409daa37f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityFeatureSetUsage.java @@ -61,9 +61,7 @@ public class SecurityFeatureSetUsage extends XPackFeatureUsage { ipFilterUsage = in.readGenericMap(); anonymousUsage = in.readGenericMap(); roleMappingStoreUsage = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - fips140Usage = in.readGenericMap(); - } + fips140Usage = in.readGenericMap(); if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { operatorPrivilegesUsage = in.readGenericMap(); } @@ -129,9 +127,7 @@ public class SecurityFeatureSetUsage extends XPackFeatureUsage { out.writeGenericMap(ipFilterUsage); out.writeGenericMap(anonymousUsage); out.writeGenericMap(roleMappingStoreUsage); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeGenericMap(fips140Usage); - } + out.writeGenericMap(fips140Usage); if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_11_0)) { out.writeGenericMap(operatorPrivilegesUsage); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java index 3b5ddb21be91..067bb156aa90 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationRequest.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.security.action.saml; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.io.stream.StreamInput; @@ -33,9 +32,7 @@ public final class SamlPrepareAuthenticationRequest extends ActionRequest { super(in); realmName = in.readOptionalString(); assertionConsumerServiceURL = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - relayState = in.readOptionalString(); - } + relayState = in.readOptionalString(); } public SamlPrepareAuthenticationRequest() {} @@ -87,8 +84,6 @@ public final class SamlPrepareAuthenticationRequest extends ActionRequest { super.writeTo(out); out.writeOptionalString(realmName); out.writeOptionalString(assertionConsumerServiceURL); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - out.writeOptionalString(relayState); - } + out.writeOptionalString(relayState); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java index 099eaa2468e1..53b45827c4a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/SLMFeatureSetUsage.java @@ -41,7 +41,7 @@ public class SLMFeatureSetUsage extends XPackFeatureUsage { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_5_0; + return TransportVersions.ZERO; } public SnapshotLifecycleStats getStats() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java index 9704335776f1..d0d5e463f965 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLService.java @@ -596,6 +596,8 @@ public class SSLService { sslSettingsMap.put(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX, settings.getByPrefix(WatcherField.EMAIL_NOTIFICATION_SSL_PREFIX)); sslSettingsMap.put(XPackSettings.TRANSPORT_SSL_PREFIX, settings.getByPrefix(XPackSettings.TRANSPORT_SSL_PREFIX)); sslSettingsMap.putAll(getTransportProfileSSLSettings(settings)); + // Mount Elastic Inference Service (part of the Inference plugin) configuration + sslSettingsMap.put("xpack.inference.elastic.http.ssl", settings.getByPrefix("xpack.inference.elastic.http.ssl.")); // Only build remote cluster server SSL if the port is enabled if (REMOTE_CLUSTER_SERVER_ENABLED.get(settings)) { sslSettingsMap.put(XPackSettings.REMOTE_CLUSTER_SERVER_SSL_PREFIX, getRemoteClusterServerSslSettings(settings)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java index e4c15a3b9007..909bf6858eab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformFeatureSetUsage.java @@ -50,7 +50,7 @@ public class TransformFeatureSetUsage extends XPackFeatureUsage { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_7_5_0; + return TransportVersions.ZERO; } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index e889d25cd7a9..d788a0b5abd3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.XPackField; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -59,6 +60,12 @@ public class XPackLicenseStateTests extends ESTestCase { assertEquals(expectedMessages, gotMessages.length); } + void assertAckMessages(String feature, OperationMode from, OperationMode to, Set expectedMessages) { + String[] gotMessages = XPackLicenseState.ACKNOWLEDGMENT_MESSAGES.get(feature).apply(from, to); + Set actualMessages = Arrays.stream(gotMessages).collect(Collectors.toSet()); + assertThat(actualMessages, equalTo(expectedMessages)); + } + static T randomFrom(T[] values, Predicate filter) { return randomFrom(Arrays.stream(values).filter(filter).collect(Collectors.toList())); } @@ -143,6 +150,16 @@ public class XPackLicenseStateTests extends ESTestCase { assertAckMessages(XPackField.CCR, randomTrialOrPlatinumMode(), randomBasicStandardOrGold(), 1); } + public void testEsqlAckToTrialOrPlatinum() { + assertAckMessages(XPackField.ESQL, randomMode(), randomFrom(TRIAL, ENTERPRISE), 0); + } + + public void testEsqlAckTrialOrEnterpriseToNotTrialOrEnterprise() { + for (OperationMode to : List.of(BASIC, STANDARD, GOLD, PLATINUM)) { + assertAckMessages(XPackField.ESQL, randomFrom(TRIAL, ENTERPRISE), to, Set.of("ES|QL cross-cluster search will be disabled.")); + } + } + public void testExpiredLicense() { // use standard feature which would normally be allowed at all license levels LicensedFeature feature = LicensedFeature.momentary("family", "enterpriseFeature", STANDARD); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 1f2c89c473a6..d50f7bb27a5d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -623,7 +623,7 @@ public class LocalStateCompositeXPackPlugin extends XPackPlugin } @SuppressWarnings("unchecked") - private List filterPlugins(Class type) { + protected List filterPlugins(Class type) { return plugins.stream().filter(x -> type.isAssignableFrom(x.getClass())).map(p -> ((T) p)).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java index 1fc0afafde35..c5a8185f8511 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocateActionTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -44,20 +43,20 @@ public class AllocateActionTests extends AbstractActionTestCase includes = randomAllocationRoutingMap(1, 100); hasAtLeastOneMap = true; } else { - includes = randomBoolean() ? null : Collections.emptyMap(); + includes = randomBoolean() ? null : Map.of(); } Map excludes; if (randomBoolean()) { hasAtLeastOneMap = true; excludes = randomAllocationRoutingMap(1, 100); } else { - excludes = randomBoolean() ? null : Collections.emptyMap(); + excludes = randomBoolean() ? null : Map.of(); } Map requires; if (hasAtLeastOneMap == false || randomBoolean()) { requires = randomAllocationRoutingMap(1, 100); } else { - requires = randomBoolean() ? null : Collections.emptyMap(); + requires = randomBoolean() ? null : Map.of(); } Integer numberOfReplicas = randomBoolean() ? null : randomIntBetween(0, 10); Integer totalShardsPerNode = randomBoolean() ? null : randomIntBetween(-1, 10); @@ -97,9 +96,9 @@ public class AllocateActionTests extends AbstractActionTestCase } public void testAllMapsNullOrEmpty() { - Map include = randomBoolean() ? null : Collections.emptyMap(); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map include = randomBoolean() ? null : Map.of(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(null, null, include, exclude, require) @@ -124,8 +123,8 @@ public class AllocateActionTests extends AbstractActionTestCase public void testInvalidNumberOfReplicas() { Map include = randomAllocationRoutingMap(1, 5); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(randomIntBetween(-1000, -1), randomIntBetween(0, 300), include, exclude, require) @@ -135,8 +134,8 @@ public class AllocateActionTests extends AbstractActionTestCase public void testInvalidTotalShardsPerNode() { Map include = randomAllocationRoutingMap(1, 5); - Map exclude = randomBoolean() ? null : Collections.emptyMap(); - Map require = randomBoolean() ? null : Collections.emptyMap(); + Map exclude = randomBoolean() ? null : Map.of(); + Map require = randomBoolean() ? 
null : Map.of(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> new AllocateAction(randomIntBetween(0, 300), randomIntBetween(-1000, -2), include, exclude, require) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java index afad708ddbe2..708c3630b8b8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/AllocationRoutedStepTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.node.Node; import org.elasticsearch.xpack.core.ilm.ClusterStateWaitStep.Result; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.cluster.routing.TestShardRouting.buildUnassignedInfo; @@ -109,7 +108,7 @@ public class AllocationRoutedStepTests extends AbstractStepTestCase requires = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "foo", "bar"); + Map requires = Map.of(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); @@ -187,7 +186,7 @@ public class AllocationRoutedStepTests extends AbstractStepTestCase excludes = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); + Map excludes = Map.of(IndexMetadata.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); @@ -218,7 +217,7 @@ public class AllocationRoutedStepTests extends AbstractStepTestCase includes = Collections.singletonMap(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); + Map includes = Map.of(IndexMetadata.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "foo", "bar"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java index 8dcd8fc7ddd5..72bf7cedb2fb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/CheckShrinkReadyStepTests.java @@ -29,9 +29,9 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.Node; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata.Type.SIGTERM; import static org.elasticsearch.cluster.routing.TestShardRouting.shardRoutingBuilder; @@ -340,7 +340,7 @@ public class CheckShrinkReadyStepTests extends AbstractStepTestCase requires = Collections.singletonMap("_id", "node1"); + Map requires = Map.of("_id", "node1"); Settings.Builder existingSettings = Settings.builder() 
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1") @@ -376,7 +376,7 @@ public class CheckShrinkReadyStepTests extends AbstractStepTestCase requires = Collections.singletonMap("_id", "node1"); + Map requires = Map.of("_id", "node1"); Settings.Builder existingSettings = Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_PREFIX + "._id", "node1") @@ -458,7 +458,7 @@ public class CheckShrinkReadyStepTests extends AbstractStepTestCase listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new CloseIndexResponse(true, true, Collections.emptyList())); + listener.onResponse(new CloseIndexResponse(true, true, List.of())); return null; }).when(indicesClient).close(Mockito.any(), Mockito.any()); @@ -54,7 +55,7 @@ public class CloseFollowerIndexStepTests extends AbstractStepTestCase listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new CloseIndexResponse(false, false, Collections.emptyList())); + listener.onResponse(new CloseIndexResponse(false, false, List.of())); return null; }).when(indicesClient).close(Mockito.any(), Mockito.any()); @@ -85,7 +86,7 @@ public class CloseFollowerIndexStepTests extends AbstractStepTestCase { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.indices(), equalTo(new String[] { indexMetadata.getIndex().getName() })); - listener.onResponse( - new CloseIndexResponse(true, true, Collections.singletonList(new CloseIndexResponse.IndexResult(indexMetadata.getIndex()))) - ); + listener.onResponse(new CloseIndexResponse(true, true, List.of(new CloseIndexResponse.IndexResult(indexMetadata.getIndex())))); return null; }).when(indicesClient).close(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java index f24f0f86de7d..eeddda419966 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ClusterStateWaitUntilThresholdStepTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.ilm.step.info.SingleMessageFieldInfo; import java.time.Clock; import java.time.Instant; import java.time.ZoneId; -import java.util.Collections; import java.util.Map; import java.util.UUID; @@ -83,7 +82,7 @@ public class ClusterStateWaitUntilThresholdStepTests extends AbstractStepTestCas .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "480h") ) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(System.currentTimeMillis()))) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -107,7 +106,7 @@ public class ClusterStateWaitUntilThresholdStepTests extends AbstractStepTestCas .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "48h") ) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(System.currentTimeMillis()))) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -140,7 +139,7 @@ public class 
ClusterStateWaitUntilThresholdStepTests extends AbstractStepTestCas settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true") .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "1s") ) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(1234L))) .numberOfShards(1) .numberOfReplicas(0) @@ -168,7 +167,7 @@ public class ClusterStateWaitUntilThresholdStepTests extends AbstractStepTestCas settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "false") .put(LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD, "1h") ) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .putCustom(ILM_CUSTOM_METADATA_KEY, Map.of("step_time", String.valueOf(1234L))) .numberOfShards(1) .numberOfReplicas(0) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java index 937502281b64..c4138d228719 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ExplainLifecycleResponseTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -81,7 +80,7 @@ public class ExplainLifecycleResponseTests extends AbstractXContentSerializingTe protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java index aecf029a1357..b8d480200fb5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeActionTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.io.IOException; import java.util.List; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; @@ -109,7 +108,7 @@ public class ForceMergeActionTests extends AbstractActionTestCase new Tuple<>(s.getKey(), s.getNextStepKey())) - .collect(Collectors.toList()); + .toList(); StepKey checkNotWriteIndex = new StepKey(phase, ForceMergeAction.NAME, CheckNotDataStreamWriteIndexStep.NAME); StepKey waitTimeSeriesEndTimePassesKey = new StepKey(phase, ForceMergeAction.NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index 1e1b69a3db06..748798985b20 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; -import java.util.Collections; +import java.util.List; import java.util.Locale; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; @@ -82,7 +82,7 @@ public class GenerateSnapshotNameStepTests extends AbstractStepTestCase entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java index 70f75f1cfcdf..1bea0ac6d192 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyTests.java @@ -20,8 +20,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -30,7 +28,6 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.Function; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -48,7 +45,7 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase

entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), @@ -150,7 +147,7 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
TimeseriesLifecycleType.FROZEN_PHASE.equals(pn) == false) - .collect(Collectors.toList()); + .toList(); // let's order the phases so we can reason about actions in a previous phase in order to generate a random *valid* policy List orderedPhases = new ArrayList<>(phaseNames.size()); @@ -218,7 +215,7 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
randomFrom( TimeseriesLifecycleType.ORDERED_VALID_PHASES.stream() .filter(pn -> TimeseriesLifecycleType.FROZEN_PHASE.equals(pn) == false) - .collect(Collectors.toList()) + .toList() ) ); phases = new LinkedHashMap<>(phases); - phases.put(phaseName, new Phase(phaseName, null, Collections.emptyMap())); + phases.put(phaseName, new Phase(phaseName, null, Map.of())); } case 2 -> metadata = randomValueOtherThan(metadata, LifecyclePolicyTests::randomMeta); case 3 -> deprecated = instance.isDeprecated() ? randomFrom(false, null) : true; @@ -337,8 +334,8 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Arrays.asList(mockStep)); - Map actions = Collections.singletonMap(MockAction.NAME, firstAction); + LifecycleAction firstAction = new MockAction(List.of(mockStep)); + Map actions = Map.of(MockAction.NAME, firstAction); Phase firstPhase = new Phase("test", TimeValue.ZERO, actions); phases.put(firstPhase.getName(), firstPhase); LifecyclePolicy policy = new LifecyclePolicy(TestLifecycleType.INSTANCE, lifecycleName, phases, randomMeta()); @@ -372,10 +369,10 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Arrays.asList(firstActionStep, firstActionAnotherStep)); - LifecycleAction secondAction = new MockAction(Arrays.asList(secondActionStep)); - Map firstActions = Collections.singletonMap(MockAction.NAME, firstAction); - Map secondActions = Collections.singletonMap(MockAction.NAME, secondAction); + LifecycleAction firstAction = new MockAction(List.of(firstActionStep, firstActionAnotherStep)); + LifecycleAction secondAction = new MockAction(List.of(secondActionStep)); + Map firstActions = Map.of(MockAction.NAME, firstAction); + Map secondActions = Map.of(MockAction.NAME, secondAction); Phase firstPhase = new Phase("first_phase", TimeValue.ZERO, firstActions); Phase secondPhase = new Phase("second_phase", TimeValue.ZERO, secondActions); phases.put(firstPhase.getName(), firstPhase); @@ -401,10 +398,10 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
phases = new LinkedHashMap<>(); - LifecycleAction firstAction = new MockAction(Collections.emptyList(), true); - LifecycleAction secondAction = new MockAction(Collections.emptyList(), false); - Map firstActions = Collections.singletonMap(MockAction.NAME, firstAction); - Map secondActions = Collections.singletonMap(MockAction.NAME, secondAction); + LifecycleAction firstAction = new MockAction(List.of(), true); + LifecycleAction secondAction = new MockAction(List.of(), false); + Map firstActions = Map.of(MockAction.NAME, firstAction); + Map secondActions = Map.of(MockAction.NAME, secondAction); Phase firstPhase = new Phase("first_phase", TimeValue.ZERO, firstActions); Phase secondPhase = new Phase("second_phase", TimeValue.ZERO, secondActions); phases.put(firstPhase.getName(), firstPhase); @@ -458,12 +455,9 @@ public class LifecyclePolicyTests extends AbstractXContentSerializingTestCase
randomMeta() { if (randomBoolean()) { if (randomBoolean()) { - return Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4)); + return Map.of(randomAlphaOfLength(4), randomAlphaOfLength(4)); } else { - return Collections.singletonMap( - randomAlphaOfLength(5), - Collections.singletonMap(randomAlphaOfLength(4), randomAlphaOfLength(4)) - ); + return Map.of(randomAlphaOfLength(5), Map.of(randomAlphaOfLength(4), randomAlphaOfLength(4))); } } else { return null; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java index 7bdb4acff589..2dcb8d6c2a10 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/LifecyclePolicyUtilsTests.java @@ -20,8 +20,8 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; -import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -34,7 +34,7 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { var project = ProjectMetadata.builder(randomProjectIdOrDefault()).build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, project, "mypolicy"), - equalTo(new ItemUsage(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())) + equalTo(new ItemUsage(List.of(), List.of(), List.of())) ); } @@ -44,14 +44,14 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) .build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, project, "mypolicy"), - equalTo(new ItemUsage(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())) + equalTo(new ItemUsage(List.of(), List.of(), List.of())) ); } @@ -61,7 +61,7 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( -
Collections.singletonMap( + Map.of( "mytemplate", ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList("myds")) + .indexPatterns(List.of("myds")) .template( new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), null, null) ) @@ -108,7 +108,7 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { .build(); assertThat( LifecyclePolicyUtils.calculateUsage(iner, project, "mypolicy"), - equalTo(new ItemUsage(Collections.singleton("myindex"), Collections.emptyList(), Collections.singleton("mytemplate"))) + equalTo(new ItemUsage(List.of("myindex"), List.of(), List.of("mytemplate"))) ); } @@ -117,7 +117,7 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { .putCustom( IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata( - Collections.singletonMap("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), + Map.of("mypolicy", LifecyclePolicyMetadataTests.createRandomPolicyMetadata("mypolicy")), OperationMode.RUNNING ) ) @@ -137,10 +137,10 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { .putCustom( ComposableIndexTemplateMetadata.TYPE, new ComposableIndexTemplateMetadata( - Collections.singletonMap( + Map.of( "mytemplate", ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList("myds")) + .indexPatterns(List.of("myds")) .template( new Template(Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy").build(), null, null) ) @@ -150,16 +150,12 @@ public class LifecyclePolicyUtilsTests extends ESTestCase { ) ); // Need to get the real Index instance of myindex: - projectBuilder.put( - DataStreamTestHelper.newInstance("myds", Collections.singletonList(projectBuilder.get("myindex").getIndex())) - ); + projectBuilder.put(DataStreamTestHelper.newInstance("myds", List.of(projectBuilder.get("myindex").getIndex()))); // Test where policy exists and is used by an index, datastream, and template assertThat( LifecyclePolicyUtils.calculateUsage(iner, projectBuilder.build(), "mypolicy"), - equalTo( - new ItemUsage(Arrays.asList("myindex", "another"), Collections.singleton("myds"), Collections.singleton("mytemplate")) - ) + equalTo(new ItemUsage(List.of("myindex", "another"), List.of("myds"), List.of("mytemplate"))) ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java index 0de234615f4c..79f8a051abe2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/MockAction.java @@ -15,10 +15,8 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.stream.Collectors; public class MockAction implements LifecycleAction { public static final String NAME = "TEST_ACTION"; @@ -32,7 +30,7 @@ public class MockAction implements LifecycleAction { } public MockAction() { - this(Collections.emptyList()); + this(List.of()); } public MockAction(List steps) { @@ -77,7 +75,7 @@ public class MockAction implements LifecycleAction { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeCollection(steps.stream().map(MockStep::new).collect(Collectors.toList())); + out.writeCollection(steps.stream().map(MockStep::new).toList()); out.writeBoolean(safe); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java index 56b177670f41..585cf7325489 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/OperationModeUpdateTaskTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleMetadata; import org.elasticsearch.xpack.core.slm.SnapshotLifecycleStats; -import java.util.Collections; import java.util.Map; import java.util.Optional; @@ -97,9 +96,9 @@ public class OperationModeUpdateTaskTests extends ESTestCase { OperationMode requestMode, boolean assertSameClusterState ) { - IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), currentMode); + IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Map.of(), currentMode); SnapshotLifecycleMetadata snapshotLifecycleMetadata = new SnapshotLifecycleMetadata( - Collections.emptyMap(), + Map.of(), currentMode, new SnapshotLifecycleStats() ); @@ -133,9 +132,9 @@ public class OperationModeUpdateTaskTests extends ESTestCase { OperationMode requestMode, boolean assertSameClusterState ) { - IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), currentMode); + IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(Map.of(), currentMode); SnapshotLifecycleMetadata snapshotLifecycleMetadata = new SnapshotLifecycleMetadata( - Collections.emptyMap(), + Map.of(), currentMode, new SnapshotLifecycleStats() ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java index 51ebc9817695..da5d6eddfc72 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PauseFollowerIndexStepTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.xpack.core.ccr.action.PauseFollowAction; import org.elasticsearch.xpack.core.ccr.action.ShardFollowTask; import org.mockito.Mockito; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -38,7 +38,7 @@ public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa public void testPauseFollowingIndex() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -60,7 +60,7 @@ public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa public void testRequestNotAcknowledged() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -81,7 +81,7 @@ 
public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa public void testPauseFollowingIndexFailed() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -107,7 +107,7 @@ public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa public final void testNoShardFollowPersistentTasks() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("managed-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -138,7 +138,7 @@ public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa IndexMetadata followerIndex = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current())) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -171,7 +171,7 @@ public class PauseFollowerIndexStepTests extends AbstractUnfollowIndexStepTestCa new ByteSizeValue(512, ByteSizeUnit.MB), TimeValue.timeValueMillis(10), TimeValue.timeValueMillis(10), - Collections.emptyMap() + Map.of() ), null ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java index 733d8fa97359..48de17840c29 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagementTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -85,9 +84,9 @@ public class PhaseCacheManagementTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); ClusterState existingState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(meta, false).build()) @@ -316,7 +315,7 @@ public class PhaseCacheManagementTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", 
phases); assertTrue(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -352,7 +351,7 @@ public class PhaseCacheManagementTests extends ESTestCase { Map actions = new HashMap<>(); actions.put("set_priority", new SetPriorityAction(150)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -391,7 +390,7 @@ public class PhaseCacheManagementTests extends ESTestCase { new RolloverAction(null, null, TimeValue.timeValueSeconds(5), null, null, null, null, null, null, null) ); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -423,7 +422,7 @@ public class PhaseCacheManagementTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -444,7 +443,7 @@ public class PhaseCacheManagementTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); assertFalse(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -483,16 +482,16 @@ public class PhaseCacheManagementTests extends ESTestCase { oldActions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); oldActions.put("set_priority", new SetPriorityAction(100)); Phase oldHotPhase = new Phase("hot", TimeValue.ZERO, oldActions); - Map oldPhases = Collections.singletonMap("hot", oldHotPhase); + Map oldPhases = Map.of("hot", oldHotPhase); LifecyclePolicy oldPolicy = new LifecyclePolicy("my-policy", oldPhases); Map actions = new HashMap<>(); actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); assertTrue(isIndexPhaseDefinitionUpdatable(REGISTRY, client, meta, newPolicy, null)); @@ -510,9 +509,9 @@ public class PhaseCacheManagementTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 2L, 
null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(150)); hotPhase = new Phase("hot", TimeValue.ZERO, actions); - phases = Collections.singletonMap("hot", hotPhase); + phases = Map.of("hot", hotPhase); newPolicy = new LifecyclePolicy("my-policy", phases); - policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); logger.info("--> update with changed policy, but not configured in settings"); updatedState = updateIndicesForPolicy(existingState, REGISTRY, client, oldPolicy, policyMetadata, null); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java index 7622118d2b99..ce477a07c2f0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseExecutionInfoTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.junit.Before; import java.io.IOException; -import java.util.Arrays; +import java.util.List; public class PhaseExecutionInfoTests extends AbstractXContentSerializingTestCase { @@ -71,7 +71,7 @@ public class PhaseExecutionInfoTests extends AbstractXContentSerializingTestCase protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java index bf925c4282fc..5a194b48f770 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/PhaseTests.java @@ -18,9 +18,8 @@ import org.elasticsearch.xcontent.XContentParser; import org.junit.Before; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -42,9 +41,9 @@ public class PhaseTests extends AbstractXContentSerializingTestCase { if (randomBoolean()) { after = randomTimeValue(0, 1_000_000_000, TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS); } - Map actions = Collections.emptyMap(); + Map actions = Map.of(); if (randomBoolean()) { - actions = Collections.singletonMap(MockAction.NAME, new MockAction()); + actions = Map.of(MockAction.NAME, new MockAction()); } return new Phase(phaseName, after, actions); } @@ -61,7 +60,7 @@ public class PhaseTests extends AbstractXContentSerializingTestCase { protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) + List.of(new NamedWriteableRegistry.Entry(LifecycleAction.class, MockAction.NAME, MockAction::new)) ); } @@ -85,7 +84,7 @@ public class PhaseTests extends AbstractXContentSerializingTestCase { case 1 -> after = TimeValue.timeValueSeconds(after.getSeconds() + randomIntBetween(1, 1000)); case 2 -> { actions = new HashMap<>(actions); 
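Note on the mutation branch just above: the test copies the phase's actions into a HashMap before calling put(). That copy is what keeps this migration safe, because Collections.singletonMap, Collections.emptyMap and their Map.of replacements are all unmodifiable. A minimal plain-JDK sketch of the idiom (standalone demo code, not part of this change):

    import java.util.HashMap;
    import java.util.Map;

    class CopyThenMutateSketch {
        public static void main(String[] args) {
            Map<String, String> actions = Map.of("TEST_ACTION", "mock");
            try {
                actions.put("TEST_ACTIONanother", "mock2"); // Map.of maps reject mutation
            } catch (UnsupportedOperationException expected) {
                System.out.println("direct mutation rejected");
            }
            // The idiom used in the test: copy first, then mutate the copy.
            Map<String, String> mutable = new HashMap<>(actions);
            mutable.put("TEST_ACTIONanother", "mock2");
            System.out.println(mutable);
        }
    }

One caveat worth keeping in mind when swapping these factories: unlike the Collections versions, Map.of and List.of reject null keys, values and elements outright, so a call site that can legitimately produce null has to keep the old factory or guard against null first. The tests touched here only pass non-null literals, so the swap is behaviour-preserving.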
- actions.put(MockAction.NAME + "another", new MockAction(Collections.emptyList())); + actions.put(MockAction.NAME + "another", new MockAction(List.of())); } default -> throw new AssertionError("Illegal randomisation branch"); } @@ -93,7 +92,7 @@ public class PhaseTests extends AbstractXContentSerializingTestCase { } public void testDefaultAfter() { - Phase phase = new Phase(randomAlphaOfLength(20), null, Collections.emptyMap()); + Phase phase = new Phase(randomAlphaOfLength(20), null, Map.of()); assertEquals(TimeValue.ZERO, phase.getMinimumAge()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java index 4af25d094f5f..3683690763d9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/RolloverStepTests.java @@ -23,9 +23,9 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.hamcrest.Matchers; import org.mockito.Mockito; -import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; import static org.mockito.Mockito.verifyNoMoreInteractions; @@ -185,7 +185,7 @@ public class RolloverStepTests extends AbstractStepTestCase { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertRolloverIndexRequest(request, rolloverTarget); - listener.onResponse(new RolloverResponse(null, null, Collections.emptyMap(), request.isDryRun(), true, true, true, false)); + listener.onResponse(new RolloverResponse(null, null, Map.of(), request.isDryRun(), true, true, true, false)); return null; }).when(indicesClient).rolloverIndex(Mockito.any(), Mockito.any()); } @@ -214,11 +214,7 @@ public class RolloverStepTests extends AbstractStepTestCase { .putAlias(AliasMetadata.builder(rolloverAlias)) .settings(settings(IndexVersion.current()).put(RolloverAction.LIFECYCLE_ROLLOVER_ALIAS, rolloverAlias)) .putRolloverInfo( - new RolloverInfo( - rolloverAlias, - Collections.singletonList(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), - System.currentTimeMillis() - ) + new RolloverInfo(rolloverAlias, List.of(new MaxSizeCondition(ByteSizeValue.ofBytes(2L))), System.currentTimeMillis()) ) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java index 1d14bfb261fc..9f04e202022c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SegmentCountStepTests.java @@ -77,14 +77,14 @@ public class SegmentCountStepTests extends AbstractStepTestCase indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments - randomIntBetween(0, 3); i++) { segments.add(null); } Mockito.when(indicesSegmentResponse.getStatus()).thenReturn(RestStatus.OK); - Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Collections.singletonMap(index.getName(), indexSegments)); + 
Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Map.of(index.getName(), indexSegments)); Mockito.when(indexSegments.spliterator()).thenReturn(iss); Mockito.when(shardSegmentsOne.getSegments()).thenReturn(segments); @@ -129,14 +129,14 @@ public class SegmentCountStepTests extends AbstractStepTestCase indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments + randomIntBetween(1, 3); i++) { segments.add(null); } Mockito.when(indicesSegmentResponse.getStatus()).thenReturn(RestStatus.OK); - Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Collections.singletonMap(index.getName(), indexSegments)); + Mockito.when(indicesSegmentResponse.getIndices()).thenReturn(Map.of(index.getName(), indexSegments)); Mockito.when(indexSegments.spliterator()).thenReturn(iss); Mockito.when(shardSegmentsOne.getSegments()).thenReturn(segments); @@ -181,7 +181,7 @@ public class SegmentCountStepTests extends AbstractStepTestCase indexShards = Collections.singletonMap(0, indexShardSegments); + Map indexShards = Map.of(0, indexShardSegments); Spliterator iss = indexShards.values().spliterator(); List segments = new ArrayList<>(); for (int i = 0; i < maxNumSegments + randomIntBetween(1, 3); i++) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java index f7b29f21e5f1..1bfe96bf0445 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkActionTests.java @@ -28,8 +28,8 @@ import org.junit.Before; import org.mockito.Mockito; import java.io.IOException; -import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -203,11 +203,11 @@ public class ShrinkActionTests extends AbstractActionTestCase { LifecyclePolicy policy = new LifecyclePolicy( lifecycleName, - Collections.singletonMap("warm", new Phase("warm", TimeValue.ZERO, Collections.singletonMap(action.getWriteableName(), action))) + Map.of("warm", new Phase("warm", TimeValue.ZERO, Map.of(action.getWriteableName(), action))) ); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -216,10 +216,7 @@ public class ShrinkActionTests extends AbstractActionTestCase { Metadata.builder() .putCustom( IndexLifecycleMetadata.TYPE, - new IndexLifecycleMetadata( - Collections.singletonMap(policyMetadata.getName(), policyMetadata), - OperationMode.RUNNING - ) + new IndexLifecycleMetadata(Map.of(policyMetadata.getName(), policyMetadata), OperationMode.RUNNING) ) .put( indexMetadataBuilder.putCustom( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java index 7a03343b461d..c8efce288260 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkSetAliasStepTests.java @@ -17,7 +17,6 @@ import 
org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.mockito.Mockito; import org.mockito.stubbing.Answer; -import java.util.Arrays; import java.util.List; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; @@ -73,7 +72,7 @@ public class ShrinkSetAliasStepTests extends AbstractStepTestCase expectedAliasActions = Arrays.asList( + List expectedAliasActions = List.of( IndicesAliasesRequest.AliasActions.removeIndex().index(sourceIndex), IndicesAliasesRequest.AliasActions.add().index(shrunkenIndex).alias(sourceIndex), IndicesAliasesRequest.AliasActions.add() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java index 257df32b4d95..b138339c2519 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java @@ -21,8 +21,8 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.mockito.Mockito; -import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.elasticsearch.common.IndexNameGenerator.generateValidIndexName; @@ -101,7 +101,7 @@ public class ShrinkStepTests extends AbstractStepTestCase { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.getSourceIndex(), equalTo(sourceIndexMetadata.getIndex().getName())); - assertThat(request.getTargetIndexRequest().aliases(), equalTo(Collections.emptySet())); + assertThat(request.getTargetIndexRequest().aliases(), equalTo(Set.of())); Settings.Builder builder = Settings.builder(); builder.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, sourceIndexMetadata.getNumberOfReplicas()) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java index f9f06b10ad2f..1a99043b86ad 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SwapAliasesAndDeleteSourceIndexStepTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Arrays; import java.util.List; import java.util.function.BiFunction; @@ -92,7 +91,7 @@ public class SwapAliasesAndDeleteSourceIndexStepTests extends AbstractStepTestCa String targetIndexPrefix = "index_prefix"; String targetIndexName = targetIndexPrefix + sourceIndexName; - List expectedAliasActions = Arrays.asList( + List expectedAliasActions = List.of( AliasActions.removeIndex().index(sourceIndexName), AliasActions.add().index(targetIndexName).alias(sourceIndexName), AliasActions.add() diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java index 55fa3792fa6c..f7d1ff5294f5 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/TimeseriesLifecycleTypeTests.java @@ -13,9 +13,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,13 +49,7 @@ import static org.hamcrest.Matchers.notNullValue; public class TimeseriesLifecycleTypeTests extends ESTestCase { - private static final AllocateAction TEST_ALLOCATE_ACTION = new AllocateAction( - 2, - 20, - Collections.singletonMap("node", "node1"), - null, - null - ); + private static final AllocateAction TEST_ALLOCATE_ACTION = new AllocateAction(2, 20, Map.of("node", "node1"), null, null); private static final DeleteAction TEST_DELETE_ACTION = DeleteAction.WITH_SNAPSHOT_DELETE; private static final WaitForSnapshotAction TEST_WAIT_FOR_SNAPSHOT_ACTION = new WaitForSnapshotAction("policy"); @@ -91,7 +83,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { if (invalid) { phaseName += randomAlphaOfLength(5); } - Map phases = Collections.singletonMap(phaseName, new Phase(phaseName, TimeValue.ZERO, Collections.emptyMap())); + Map phases = Map.of(phaseName, new Phase(phaseName, TimeValue.ZERO, Map.of())); if (invalid) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(phases.values())); assertThat(e.getMessage(), equalTo("Timeseries lifecycle does not support phase [" + phaseName + "]")); @@ -109,7 +101,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { invalidAction = getTestAction(randomFrom("allocate", "delete", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map hotPhase = Collections.singletonMap("hot", new Phase("hot", TimeValue.ZERO, actions)); + Map hotPhase = Map.of("hot", new Phase("hot", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(hotPhase.values())); @@ -123,14 +115,14 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { final Map hotActionMap = hotActions.stream() .map(this::getTestAction) .collect(Collectors.toMap(LifecycleAction::getWriteableName, Function.identity())); - TimeseriesLifecycleType.INSTANCE.validate(Collections.singleton(new Phase("hot", TimeValue.ZERO, hotActionMap))); + TimeseriesLifecycleType.INSTANCE.validate(List.of(new Phase("hot", TimeValue.ZERO, hotActionMap))); }; - validateHotActions.accept(Arrays.asList(RolloverAction.NAME)); - validateHotActions.accept(Arrays.asList(RolloverAction.NAME, ForceMergeAction.NAME)); + validateHotActions.accept(List.of(RolloverAction.NAME)); + validateHotActions.accept(List.of(RolloverAction.NAME, ForceMergeAction.NAME)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateHotActions.accept(Arrays.asList(ForceMergeAction.NAME)) + () -> validateHotActions.accept(List.of(ForceMergeAction.NAME)) ); assertThat( e.getMessage(), @@ -148,7 +140,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { invalidAction = getTestAction(randomFrom("rollover", "delete", "freeze")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map warmPhase = Collections.singletonMap("warm", new 
Phase("warm", TimeValue.ZERO, actions)); + Map warmPhase = Map.of("warm", new Phase("warm", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(warmPhase.values())); @@ -167,7 +159,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { invalidAction = getTestAction(randomFrom("rollover", "delete", "forcemerge", "shrink")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map coldPhase = Collections.singletonMap("cold", new Phase("cold", TimeValue.ZERO, actions)); + Map coldPhase = Map.of("cold", new Phase("cold", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows(IllegalArgumentException.class, () -> TimeseriesLifecycleType.INSTANCE.validate(coldPhase.values())); @@ -188,7 +180,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { invalidAction = getTestAction(randomFrom("rollover", "delete", "forcemerge", "shrink")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map frozenPhase = Collections.singletonMap("frozen", new Phase("frozen", TimeValue.ZERO, actions)); + Map frozenPhase = Map.of("frozen", new Phase("frozen", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows( @@ -210,7 +202,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { invalidAction = getTestAction(randomFrom("allocate", "rollover", "forcemerge", "shrink", "freeze", "set_priority")); actions.put(invalidAction.getWriteableName(), invalidAction); } - Map deletePhase = Collections.singletonMap("delete", new Phase("delete", TimeValue.ZERO, actions)); + Map deletePhase = Map.of("delete", new Phase("delete", TimeValue.ZERO, actions)); if (invalidAction != null) { Exception e = expectThrows( @@ -459,7 +451,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { public void testGetOrderedPhases() { Map phaseMap = new HashMap<>(); for (String phaseName : randomSubsetOf(randomIntBetween(0, ORDERED_VALID_PHASES.size()), ORDERED_VALID_PHASES)) { - phaseMap.put(phaseName, new Phase(phaseName, TimeValue.ZERO, Collections.emptyMap())); + phaseMap.put(phaseName, new Phase(phaseName, TimeValue.ZERO, Map.of())); } assertTrue(isSorted(TimeseriesLifecycleType.INSTANCE.getOrderedPhases(phaseMap), Phase::getName, ORDERED_VALID_PHASES)); @@ -509,7 +501,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { public void testGetOrderedActionsInvalidPhase() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> TimeseriesLifecycleType.INSTANCE.getOrderedActions(new Phase("invalid", TimeValue.ZERO, Collections.emptyMap())) + () -> TimeseriesLifecycleType.INSTANCE.getOrderedActions(new Phase("invalid", TimeValue.ZERO, Map.of())) ); assertThat(exception.getMessage(), equalTo("lifecycle type [timeseries] does not support phase [invalid]")); } @@ -583,25 +575,25 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { { // not inject in hot phase - Phase phase = new Phase(HOT_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(HOT_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } { // not inject in frozen phase - Phase phase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); 
} { // not inject in delete phase - Phase phase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } { // return false for invalid phase - Phase phase = new Phase(HOT_PHASE + randomAlphaOfLength(5), TimeValue.ZERO, Collections.emptyMap()); + Phase phase = new Phase(HOT_PHASE + randomAlphaOfLength(5), TimeValue.ZERO, Map.of()); assertThat(TimeseriesLifecycleType.shouldInjectMigrateStepForPhase(phase), is(false)); } } @@ -620,7 +612,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { Phase coldPhase = new Phase(HOT_PHASE, TimeValue.ZERO, coldActions); Phase frozenPhase = new Phase(HOT_PHASE, TimeValue.ZERO, frozenActions); - validateAllSearchableSnapshotActionsUseSameRepository(Arrays.asList(hotPhase, coldPhase, frozenPhase)); + validateAllSearchableSnapshotActionsUseSameRepository(List.of(hotPhase, coldPhase, frozenPhase)); } { @@ -634,7 +626,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateAllSearchableSnapshotActionsUseSameRepository(Arrays.asList(hotPhase, coldPhase, frozenPhase)) + () -> validateAllSearchableSnapshotActionsUseSameRepository(List.of(hotPhase, coldPhase, frozenPhase)) ); assertThat( e.getMessage(), @@ -649,25 +641,25 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { public void testValidatingIncreasingAges() { { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.ZERO, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); assertFalse( Strings.hasText( - validateMonotonicallyIncreasingPhaseTimings(Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)) + validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)) ) ); } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); List phases = new ArrayList<>(); 
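Note: the blocks around this point all drive validateMonotonicallyIncreasingPhaseTimings. As a hedged sketch of the invariant under test (the types, helper name and message wording below are illustrative stand-ins, not the real implementation): min_age must never decrease across the ordered phases, and a phase with no explicit age is skipped rather than treated as zero.

    import java.util.List;

    class PhaseTimingSketch {
        record PhaseAge(String name, Long ageMillis) {} // null = min_age not configured

        // Returns an error message, or null when the timings are valid.
        static String validateIncreasing(List<PhaseAge> orderedPhases) {
            long previous = 0;
            String previousName = null;
            for (PhaseAge phase : orderedPhases) {
                if (phase.ageMillis() == null) {
                    continue; // unset ages do not constrain later phases
                }
                if (phase.ageMillis() < previous) {
                    return "phase [" + phase.name() + "] has a min_age below that of phase [" + previousName + "]";
                }
                previous = phase.ageMillis();
                previousName = phase.name();
            }
            return null;
        }

        public static void main(String[] args) {
            // Mirrors the failing case in these tests: hot at 1d, cold at 12h.
            System.out.println(validateIncreasing(List.of(
                new PhaseAge("hot", 24L * 60 * 60 * 1000),
                new PhaseAge("warm", null),
                new PhaseAge("cold", 12L * 60 * 60 * 1000)
            )));
        }
    }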
phases.add(hotPhase); @@ -687,15 +679,13 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueHours(12), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueHours(12), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.ZERO, Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -708,15 +698,13 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(2), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -729,15 +717,13 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(1), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = 
validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -750,15 +736,13 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, null, Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, null, Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -772,15 +756,13 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { } { - Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Collections.emptyMap()); - Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Collections.emptyMap()); - Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Collections.emptyMap()); + Phase hotPhase = new Phase(HOT_PHASE, TimeValue.timeValueDays(3), Map.of()); + Phase warmPhase = new Phase(WARM_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase coldPhase = new Phase(COLD_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.timeValueDays(2), Map.of()); + Phase deletePhase = new Phase(DELETE_PHASE, TimeValue.timeValueDays(1), Map.of()); - String err = validateMonotonicallyIncreasingPhaseTimings( - Arrays.asList(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase) - ); + String err = validateMonotonicallyIncreasingPhaseTimings(List.of(hotPhase, warmPhase, coldPhase, frozenPhase, deletePhase)); assertThat( err, @@ -799,7 +781,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { Map frozenActions = new HashMap<>(); frozenActions.put(SearchableSnapshotAction.NAME, new SearchableSnapshotAction("repo1", randomBoolean())); Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, frozenActions); - validateFrozenPhaseHasSearchableSnapshotAction(Collections.singleton(frozenPhase)); + validateFrozenPhaseHasSearchableSnapshotAction(List.of(frozenPhase)); } { @@ -807,7 +789,7 @@ public class TimeseriesLifecycleTypeTests extends ESTestCase { Phase frozenPhase = new Phase(FROZEN_PHASE, TimeValue.ZERO, frozenActions); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> validateFrozenPhaseHasSearchableSnapshotAction(Collections.singleton(frozenPhase)) + () -> validateFrozenPhaseHasSearchableSnapshotAction(List.of(frozenPhase)) ); assertThat( e.getMessage(), diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java index 71f7ea2925f1..8e40d3af86d8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UnfollowFollowerIndexStepTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ccr.action.UnfollowAction; import org.mockito.Mockito; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -30,7 +30,7 @@ public class UnfollowFollowerIndexStepTests extends AbstractUnfollowIndexStepTes public void testUnFollow() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -51,7 +51,7 @@ public class UnfollowFollowerIndexStepTests extends AbstractUnfollowIndexStepTes public void testRequestNotAcknowledged() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -71,7 +71,7 @@ public class UnfollowFollowerIndexStepTests extends AbstractUnfollowIndexStepTes public void testUnFollowUnfollowFailed() { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); @@ -93,7 +93,7 @@ public class UnfollowFollowerIndexStepTests extends AbstractUnfollowIndexStepTes public void testFailureToReleaseRetentionLeases() throws Exception { IndexMetadata indexMetadata = IndexMetadata.builder("follower-index") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_INDEXING_COMPLETE, "true")) - .putCustom(CCR_METADATA_KEY, Collections.emptyMap()) + .putCustom(CCR_METADATA_KEY, Map.of()) .numberOfShards(1) .numberOfReplicas(0) .build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java index 3c618481df3d..1b08a90c6471 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStepTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; import java.util.List; import java.util.function.LongSupplier; @@ -68,7 +67,7 @@ public class UpdateRolloverLifecycleDateStepTests extends AbstractStepTestCase randomNonNegativeLong()); 
String dataStreamName = "test-datastream"; IndexMetadata originalIndexMeta = IndexMetadata.builder(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) - .putRolloverInfo(new RolloverInfo(dataStreamName, Collections.emptyList(), rolloverTime)) + .putRolloverInfo(new RolloverInfo(dataStreamName, List.of(), rolloverTime)) .settings(settings(IndexVersion.current())) .numberOfShards(randomIntBetween(1, 5)) .numberOfReplicas(randomIntBetween(0, 5)) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java index 00012575ea5d..2635e14b52eb 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForDataTierStepTests.java @@ -64,9 +64,7 @@ public class WaitForDataTierStepTests extends AbstractStepTestCase otherTiers = DataTier.ALL_DATA_TIERS.stream() - .filter(tier -> notIncludedTier.equals(tier) == false) - .collect(Collectors.toList()); + List otherTiers = DataTier.ALL_DATA_TIERS.stream().filter(tier -> notIncludedTier.equals(tier) == false).toList(); List includedTiers = randomSubsetOf(between(1, otherTiers.size()), otherTiers); String tierPreference = String.join(",", includedTiers); WaitForDataTierStep step = new WaitForDataTierStep(randomStepKey(), randomStepKey(), tierPreference); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java index 4ac5511a247c..ba9450866777 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForFollowShardTasksStepTests.java @@ -16,9 +16,9 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.mockito.Mockito; -import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -57,11 +57,11 @@ public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase statsResponses = Arrays.asList( + List statsResponses = List.of( new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 3, 3)) ); @@ -96,11 +96,11 @@ public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase statsResponses = Arrays.asList( + List statsResponses = List.of( new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(0, 9, 9)), new FollowStatsAction.StatsResponse(createShardFollowTaskStatus(1, 8, 3)) ); @@ -214,7 +214,7 @@ public class WaitForFollowShardTasksStepTests extends AbstractStepTestCase listener = (ActionListener) invocationOnMock .getArguments()[2]; - listener.onResponse(new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), statsResponses)); + listener.onResponse(new FollowStatsAction.StatsResponses(List.of(), List.of(), statsResponses)); return null; }).when(client).execute(Mockito.eq(FollowStatsAction.INSTANCE), Mockito.any(), Mockito.any()); } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java index 2f91393b451d..a0982e72b11a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/WaitForIndexingCompleteStepTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.xpack.core.ilm.Step.StepKey; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.xpack.core.ilm.UnfollowAction.CCR_METADATA_KEY; import static org.hamcrest.Matchers.equalTo; @@ -54,7 +54,7 @@ public class WaitForIndexingCompleteStepTests extends AbstractStepTestCase TestLifecycleType.INSTANCE) ) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java index feb5ca24a021..b87a4e41258b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequestTests.java @@ -34,7 +34,6 @@ import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; import org.junit.Before; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; public class PutLifecycleRequestTests extends AbstractXContentSerializingTestCase { @@ -78,7 +77,7 @@ public class PutLifecycleRequestTests extends AbstractXContentSerializingTestCas @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry( LifecycleType.class, TimeseriesLifecycleType.TYPE, @@ -105,7 +104,7 @@ public class PutLifecycleRequestTests extends AbstractXContentSerializingTestCas protected NamedXContentRegistry xContentRegistry() { List entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java index 76f4d732f4ae..44fed3d4b488 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/action/RemoveIndexLifecyclePolicyResponseTests.java @@ -14,15 +14,13 @@ import org.elasticsearch.xpack.core.ilm.action.RemoveIndexLifecyclePolicyAction. 
import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; public class RemoveIndexLifecyclePolicyResponseTests extends AbstractXContentSerializingTestCase { @Override protected Response createTestInstance() { - List failedIndexes = Arrays.asList(generateRandomStringArray(20, 20, false)); + List failedIndexes = List.of(generateRandomStringArray(20, 20, false)); return new Response(failedIndexes); } @@ -35,7 +33,7 @@ public class RemoveIndexLifecyclePolicyResponseTests extends AbstractXContentSer protected Response mutateInstance(Response instance) { List failedIndices = randomValueOtherThan( instance.getFailedIndexes(), - () -> Arrays.asList(generateRandomStringArray(20, 20, false)) + () -> List.of(generateRandomStringArray(20, 20, false)) ); return new Response(failedIndices); } @@ -53,7 +51,7 @@ public class RemoveIndexLifecyclePolicyResponseTests extends AbstractXContentSer public void testHasFailures() { Response response = new Response(new ArrayList<>()); assertFalse(response.hasFailures()); - assertEquals(Collections.emptyList(), response.getFailedIndexes()); + assertEquals(List.of(), response.getFailedIndexes()); int size = randomIntBetween(1, 10); List failedIndexes = new ArrayList<>(size); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index 9663e41a647a..bfac286bc3c3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -614,7 +614,8 @@ public class SSLServiceTests extends ESTestCase { "xpack.security.authc.realms.ldap.realm1.ssl", "xpack.security.authc.realms.saml.realm2.ssl", "xpack.monitoring.exporters.mon1.ssl", - "xpack.monitoring.exporters.mon2.ssl" }; + "xpack.monitoring.exporters.mon2.ssl", + "xpack.inference.elastic.http.ssl" }; assumeTrue("Not enough cipher suites are available to support this test", getCipherSuites.length >= contextNames.length); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformStateTests.java index e01549032be5..0acf46edc4bd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformStateTests.java @@ -7,9 +7,6 @@ package org.elasticsearch.xpack.core.transform.transforms; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractXContentSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -66,39 +63,4 @@ public class TransformStateTests extends AbstractXContentSerializingTestCase getRandomFieldsExcludeFilter() { return field -> field.isEmpty() == false; } - - public void testBackwardsSerialization() throws IOException { - TransformState state = new TransformState( - randomFrom(TransformTaskState.values()), - randomFrom(IndexerState.values()), - TransformIndexerPositionTests.randomTransformIndexerPosition(), - randomLongBetween(0, 10), - randomBoolean() ? 
null : randomAlphaOfLength(10), - randomBoolean() ? null : randomTransformProgress(), - randomBoolean() ? null : randomNodeAttributes(), - false, - randomBoolean() ? null : AuthorizationStateTests.randomAuthorizationState() - ); - // auth_state will be null after BWC deserialization - TransformState expectedState = new TransformState( - state.getTaskState(), - state.getIndexerState(), - state.getPosition(), - state.getCheckpoint(), - state.getReason(), - state.getProgress(), - state.getNode(), - state.shouldStopAtNextCheckpoint(), - null - ); - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setTransportVersion(TransportVersions.V_7_5_0); - state.writeTo(output); - try (StreamInput in = output.bytes().streamInput()) { - in.setTransportVersion(TransportVersions.V_7_5_0); - TransformState streamedState = new TransformState(in); - assertEquals(expectedState, streamedState); - } - } - } } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 7476a2fdf27c..9b515f062ca0 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -366,7 +366,7 @@ public class DeprecationInfoAction extends ActionType implements IndicesRequest.Replaceable { - private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true); + private static final IndicesOptions INDICES_OPTIONS = IndicesOptions.fromOptions(false, true, true, true, true); private String[] indices; public Request(TimeValue masterNodeTimeout, String... 
indices) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java index 9b53e6558f4d..191d6443264c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BlockHash.java @@ -180,13 +180,16 @@ public abstract class BlockHash implements Releasable, SeenGroupIds { List groups, AggregatorMode aggregatorMode, BlockFactory blockFactory, - AnalysisRegistry analysisRegistry + AnalysisRegistry analysisRegistry, + int emitBatchSize ) { - if (groups.size() != 1) { - throw new IllegalArgumentException("only a single CATEGORIZE group can used"); + if (groups.size() == 1) { + return new CategorizeBlockHash(blockFactory, groups.get(0).channel, aggregatorMode, analysisRegistry); + } else { + assert groups.get(0).isCategorize(); + assert groups.subList(1, groups.size()).stream().noneMatch(GroupSpec::isCategorize); + return new CategorizePackedValuesBlockHash(groups, blockFactory, aggregatorMode, analysisRegistry, emitBatchSize); } - - return new CategorizeBlockHash(blockFactory, groups.get(0).channel, aggregatorMode, analysisRegistry); } /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java index 35c6faf84e62..f83776fbdbc8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHash.java @@ -44,7 +44,7 @@ import java.util.Map; import java.util.Objects; /** - * Base BlockHash implementation for {@code Categorize} grouping function. + * BlockHash implementation for {@code Categorize} grouping function. */ public class CategorizeBlockHash extends BlockHash { @@ -53,11 +53,9 @@ public class CategorizeBlockHash extends BlockHash { ); private static final int NULL_ORD = 0; - // TODO: this should probably also take an emitBatchSize private final int channel; private final AggregatorMode aggregatorMode; private final TokenListCategorizer.CloseableTokenListCategorizer categorizer; - private final CategorizeEvaluator evaluator; /** @@ -95,12 +93,14 @@ public class CategorizeBlockHash extends BlockHash { } } + boolean seenNull() { + return seenNull; + } + @Override public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { - if (aggregatorMode.isInputPartial() == false) { - addInitial(page, addInput); - } else { - addIntermediate(page, addInput); + try (IntBlock block = add(page)) { + addInput.add(0, block); } } @@ -129,50 +129,38 @@ public class CategorizeBlockHash extends BlockHash { Releasables.close(evaluator, categorizer); } + private IntBlock add(Page page) { + return aggregatorMode.isInputPartial() == false ? addInitial(page) : addIntermediate(page); + } + /** * Adds initial (raw) input to the state. 
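+     * Returns a block of category IDs, one per input row; {@code add(Page, AddInput)} passes that block on to the {@code AddInput}.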
*/ - private void addInitial(Page page, GroupingAggregatorFunction.AddInput addInput) { - try (IntBlock result = (IntBlock) evaluator.eval(page.getBlock(channel))) { - addInput.add(0, result); - } + IntBlock addInitial(Page page) { + return (IntBlock) evaluator.eval(page.getBlock(channel)); } /** * Adds intermediate state to the state. */ - private void addIntermediate(Page page, GroupingAggregatorFunction.AddInput addInput) { + private IntBlock addIntermediate(Page page) { if (page.getPositionCount() == 0) { - return; + return null; } BytesRefBlock categorizerState = page.getBlock(channel); if (categorizerState.areAllValuesNull()) { seenNull = true; - try (var newIds = blockFactory.newConstantIntVector(NULL_ORD, 1)) { - addInput.add(0, newIds); - } - return; - } - - Map idMap = readIntermediate(categorizerState.getBytesRef(0, new BytesRef())); - try (IntBlock.Builder newIdsBuilder = blockFactory.newIntBlockBuilder(idMap.size())) { - int fromId = idMap.containsKey(0) ? 0 : 1; - int toId = fromId + idMap.size(); - for (int i = fromId; i < toId; i++) { - newIdsBuilder.appendInt(idMap.get(i)); - } - try (IntBlock newIds = newIdsBuilder.build()) { - addInput.add(0, newIds); - } + return blockFactory.newConstantIntBlockWith(NULL_ORD, 1); } + return recategorize(categorizerState.getBytesRef(0, new BytesRef()), null).asBlock(); } /** - * Read intermediate state from a block. - * - * @return a map from the old category id to the new one. The old ids go from 0 to {@code size - 1}. + * Reads the intermediate state from a block and recategorizes the provided IDs. + * If no IDs are provided, the IDs are the IDs in the categorizer's state in order. + * (So 0...N-1 or 1...N, depending on whether null is present.) */ - private Map readIntermediate(BytesRef bytes) { + IntVector recategorize(BytesRef bytes, IntVector ids) { Map idMap = new HashMap<>(); try (StreamInput in = new BytesArray(bytes).streamInput()) { if (in.readBoolean()) { @@ -185,10 +173,22 @@ public class CategorizeBlockHash extends BlockHash { // +1 because the 0 ordinal is reserved for null idMap.put(oldCategoryId + 1, newCategoryId + 1); } - return idMap; } catch (IOException e) { throw new RuntimeException(e); } + try (IntVector.Builder newIdsBuilder = blockFactory.newIntVectorBuilder(idMap.size())) { + if (ids == null) { + int idOffset = idMap.containsKey(0) ? 0 : 1; + for (int i = 0; i < idMap.size(); i++) { + newIdsBuilder.appendInt(idMap.get(i + idOffset)); + } + } else { + for (int i = 0; i < ids.getPositionCount(); i++) { + newIdsBuilder.appendInt(idMap.get(ids.getInt(i))); + } + } + return newIdsBuilder.build(); + } } /** @@ -198,15 +198,20 @@ public class CategorizeBlockHash extends BlockHash { if (categorizer.getCategoryCount() == 0) { return blockFactory.newConstantNullBlock(seenNull ? 1 : 0); } + int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); + // We're returning a block with N positions just because the Page must have all blocks with the same position count! + return blockFactory.newConstantBytesRefBlockWith(serializeCategorizer(), positionCount); + } + + BytesRef serializeCategorizer() { + // TODO: This BytesStreamOutput is not accounted for by the circuit breaker. Fix that! 
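+        // Wire format, which recategorize() parses back: a boolean seenNull flag, a vInt category count, then each SerializableTokenListCategory in ID order.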
try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeBoolean(seenNull); out.writeVInt(categorizer.getCategoryCount()); for (SerializableTokenListCategory category : categorizer.toCategoriesById()) { category.writeTo(out); } - // We're returning a block with N positions just because the Page must have all blocks with the same position count! - int positionCount = categorizer.getCategoryCount() + (seenNull ? 1 : 0); - return blockFactory.newConstantBytesRefBlockWith(out.bytes().toBytesRef(), positionCount); + return out.bytes().toBytesRef(); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java new file mode 100644 index 000000000000..20874cb10ceb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHash.java @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.ReleasableIterator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.index.analysis.AnalysisRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * BlockHash implementation for {@code Categorize} grouping function as the first + * grouping expression, followed by one or more other grouping expressions. + * <p>
    + * For the first grouping (the {@code Categorize} grouping function), a + * {@code CategorizeBlockHash} is used, which outputs integers (category IDs). + * Next, a {@code PackedValuesBlockHash} is used on the category IDs and the + * other groupings (which are not {@code Categorize}s). + */ +public class CategorizePackedValuesBlockHash extends BlockHash { + + private final List specs; + private final AggregatorMode aggregatorMode; + private final Block[] blocks; + private final CategorizeBlockHash categorizeBlockHash; + private final PackedValuesBlockHash packedValuesBlockHash; + + CategorizePackedValuesBlockHash( + List specs, + BlockFactory blockFactory, + AggregatorMode aggregatorMode, + AnalysisRegistry analysisRegistry, + int emitBatchSize + ) { + super(blockFactory); + this.specs = specs; + this.aggregatorMode = aggregatorMode; + blocks = new Block[specs.size()]; + + List delegateSpecs = new ArrayList<>(); + delegateSpecs.add(new GroupSpec(0, ElementType.INT)); + for (int i = 1; i < specs.size(); i++) { + delegateSpecs.add(new GroupSpec(i, specs.get(i).elementType())); + } + + boolean success = false; + try { + categorizeBlockHash = new CategorizeBlockHash(blockFactory, specs.get(0).channel(), aggregatorMode, analysisRegistry); + packedValuesBlockHash = new PackedValuesBlockHash(delegateSpecs, blockFactory, emitBatchSize); + success = true; + } finally { + if (success == false) { + close(); + } + } + } + + @Override + public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { + try (IntBlock categories = getCategories(page)) { + blocks[0] = categories; + for (int i = 1; i < specs.size(); i++) { + blocks[i] = page.getBlock(specs.get(i).channel()); + } + packedValuesBlockHash.add(new Page(blocks), addInput); + } + } + + private IntBlock getCategories(Page page) { + if (aggregatorMode.isInputPartial() == false) { + return categorizeBlockHash.addInitial(page); + } else { + BytesRefBlock stateBlock = page.getBlock(0); + BytesRef stateBytes = stateBlock.getBytesRef(0, new BytesRef()); + try (StreamInput in = new BytesArray(stateBytes).streamInput()) { + BytesRef categorizerState = in.readBytesRef(); + try (IntVector ids = IntVector.readFrom(blockFactory, in)) { + return categorizeBlockHash.recategorize(categorizerState, ids).asBlock(); + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public Block[] getKeys() { + Block[] keys = packedValuesBlockHash.getKeys(); + if (aggregatorMode.isOutputPartial() == false) { + // For final output, the keys are the category regexes. + try ( + BytesRefBlock regexes = (BytesRefBlock) categorizeBlockHash.getKeys()[0]; + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(keys[0].getPositionCount()) + ) { + IntVector idsVector = (IntVector) keys[0].asVector(); + int idsOffset = categorizeBlockHash.seenNull() ? 0 : -1; + BytesRef scratch = new BytesRef(); + for (int i = 0; i < idsVector.getPositionCount(); i++) { + int id = idsVector.getInt(i); + if (id == 0) { + builder.appendNull(); + } else { + builder.appendBytesRef(regexes.getBytesRef(id + idsOffset, scratch)); + } + } + keys[0].close(); + keys[0] = builder.build(); + } + } else { + // For intermediate output, the keys are the delegate PackedValuesBlockHash's + // keys, with the category IDs replaced by the categorizer's internal state + // together with the list of category IDs. + BytesRef state; + // TODO: This BytesStreamOutput is not accounted for by the circuit breaker. Fix that! 
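+            // The combined intermediate key written below is the serialized categorizer state followed by the IntVector of category IDs; getCategories() on the receiving side reads the two parts back in the same order.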
+ try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeBytesRef(categorizeBlockHash.serializeCategorizer()); + ((IntVector) keys[0].asVector()).writeTo(out); + state = out.bytes().toBytesRef(); + } catch (IOException e) { + throw new RuntimeException(e); + } + keys[0].close(); + keys[0] = blockFactory.newConstantBytesRefBlockWith(state, keys[0].getPositionCount()); + } + return keys; + } + + @Override + public IntVector nonEmpty() { + return packedValuesBlockHash.nonEmpty(); + } + + @Override + public BitArray seenGroupIds(BigArrays bigArrays) { + return packedValuesBlockHash.seenGroupIds(bigArrays); + } + + @Override + public final ReleasableIterator lookup(Page page, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException(); + } + + @Override + public void close() { + Releasables.close(categorizeBlockHash, packedValuesBlockHash); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java index 6f8386ec08de..ccddfdf5cc74 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/HashAggregationOperator.java @@ -51,7 +51,13 @@ public class HashAggregationOperator implements Operator { if (groups.stream().anyMatch(BlockHash.GroupSpec::isCategorize)) { return new HashAggregationOperator( aggregators, - () -> BlockHash.buildCategorizeBlockHash(groups, aggregatorMode, driverContext.blockFactory(), analysisRegistry), + () -> BlockHash.buildCategorizeBlockHash( + groups, + aggregatorMode, + driverContext.blockFactory(), + analysisRegistry, + maxPageSize + ), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index f8428b7c3356..587deda650a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -130,9 +130,6 @@ public class CategorizeBlockHashTests extends BlockHashTestCase { } finally { page.releaseBlocks(); } - - // TODO: randomize values? May give wrong results - // TODO: assert the categorizer state after adding pages. } public void testCategorizeRawMultivalue() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java new file mode 100644 index 000000000000..cfa023af3d18 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -0,0 +1,248 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.aggregation.blockhash; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.analysis.common.CommonAnalysisPlugin; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.CannedSourceOperator; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.HashAggregationOperator; +import org.elasticsearch.compute.operator.LocalSourceOperator; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.scanners.StablePluginsRegistry; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.compute.operator.OperatorTestCase.runDriver; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class CategorizePackedValuesBlockHashTests extends BlockHashTestCase { + + private AnalysisRegistry analysisRegistry; + + @Before + private void initAnalysisRegistry() throws IOException { + analysisRegistry = new AnalysisModule( + TestEnvironment.newEnvironment( + Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build() + ), + List.of(new MachineLearning(Settings.EMPTY), new CommonAnalysisPlugin()), + new StablePluginsRegistry() + ).getAnalysisRegistry(); + } + + public void testCategorize_withDriver() { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + DriverContext driverContext = new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + boolean withNull = randomBoolean(); + boolean withMultivalues = randomBoolean(); + + List groupSpecs = List.of( + new BlockHash.GroupSpec(0, ElementType.BYTES_REF, true), + new BlockHash.GroupSpec(1, ElementType.INT, false) + ); + + LocalSourceOperator.BlockSupplier input1 = () -> { + try ( + BytesRefBlock.Builder messagesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(10); + IntBlock.Builder idsBuilder = driverContext.blockFactory().newIntBlockBuilder(10) + ) { + if 
(withMultivalues) { + messagesBuilder.beginPositionEntry(); + } + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.1")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.2")); + if (withMultivalues) { + messagesBuilder.endPositionEntry(); + } + idsBuilder.appendInt(7); + if (withMultivalues == false) { + idsBuilder.appendInt(7); + } + + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.3")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 1.1.4")); + idsBuilder.appendInt(42); + idsBuilder.appendInt(7); + idsBuilder.appendInt(42); + idsBuilder.appendInt(7); + + if (withNull) { + messagesBuilder.appendNull(); + idsBuilder.appendInt(43); + } + return new Block[] { messagesBuilder.build(), idsBuilder.build() }; + } + }; + LocalSourceOperator.BlockSupplier input2 = () -> { + try ( + BytesRefBlock.Builder messagesBuilder = driverContext.blockFactory().newBytesRefBlockBuilder(10); + IntBlock.Builder idsBuilder = driverContext.blockFactory().newIntBlockBuilder(10) + ) { + messagesBuilder.appendBytesRef(new BytesRef("connected to 2.1.1")); + messagesBuilder.appendBytesRef(new BytesRef("connected to 2.1.2")); + messagesBuilder.appendBytesRef(new BytesRef("disconnected")); + messagesBuilder.appendBytesRef(new BytesRef("connection error")); + idsBuilder.appendInt(111); + idsBuilder.appendInt(7); + idsBuilder.appendInt(7); + idsBuilder.appendInt(42); + if (withNull) { + messagesBuilder.appendNull(); + idsBuilder.appendNull(); + } + return new Block[] { messagesBuilder.build(), idsBuilder.build() }; + } + }; + + List intermediateOutput = new ArrayList<>(); + + Driver driver = new Driver( + driverContext, + new LocalSourceOperator(input1), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.INITIAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + driver = new Driver( + driverContext, + new LocalSourceOperator(input2), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.INITIAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(0)).groupingAggregatorFactory(AggregatorMode.INITIAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(intermediateOutput::add), + () -> {} + ); + runDriver(driver); + + List finalOutput = new ArrayList<>(); + + driver = new Driver( + driverContext, + new CannedSourceOperator(intermediateOutput.iterator()), + List.of( + new HashAggregationOperator.HashAggregationOperatorFactory( + groupSpecs, + AggregatorMode.FINAL, + List.of(new ValuesBytesRefAggregatorFunctionSupplier(List.of(2)).groupingAggregatorFactory(AggregatorMode.FINAL)), + 16 * 1024, + analysisRegistry + ).get(driverContext) + ), + new PageConsumerOperator(finalOutput::add), + () -> {} + ); + runDriver(driver); + + assertThat(finalOutput, hasSize(1)); + assertThat(finalOutput.get(0).getBlockCount(), equalTo(3)); + BytesRefBlock outputMessages = finalOutput.get(0).getBlock(0); + IntBlock outputIds = finalOutput.get(0).getBlock(1); + BytesRefBlock outputValues = finalOutput.get(0).getBlock(2); + assertThat(outputIds.getPositionCount(), 
equalTo(outputMessages.getPositionCount())); + assertThat(outputValues.getPositionCount(), equalTo(outputMessages.getPositionCount())); + Map>> result = new HashMap<>(); + for (int i = 0; i < outputMessages.getPositionCount(); i++) { + BytesRef messageBytesRef = ((BytesRef) BlockUtils.toJavaObject(outputMessages, i)); + String message = messageBytesRef == null ? null : messageBytesRef.utf8ToString(); + result.computeIfAbsent(message, key -> new HashMap<>()); + + Integer id = (Integer) BlockUtils.toJavaObject(outputIds, i); + result.get(message).computeIfAbsent(id, key -> new HashSet<>()); + + Object values = BlockUtils.toJavaObject(outputValues, i); + if (values == null) { + result.get(message).get(id).add(null); + } else { + if ((values instanceof List) == false) { + values = List.of(values); + } + for (Object valueObject : (List) values) { + BytesRef value = (BytesRef) valueObject; + result.get(message).get(id).add(value.utf8ToString()); + } + } + } + Releasables.close(() -> Iterators.map(finalOutput.iterator(), (Page p) -> p::releaseBlocks)); + + Map>> expectedResult = Map.of( + ".*?connected.+?to.*?", + Map.of( + 7, + Set.of("connected to 1.1.1", "connected to 1.1.2", "connected to 1.1.4", "connected to 2.1.2"), + 42, + Set.of("connected to 1.1.3"), + 111, + Set.of("connected to 2.1.1") + ), + ".*?connection.+?error.*?", + Map.of(7, Set.of("connection error"), 42, Set.of("connection error")), + ".*?disconnected.*?", + Map.of(7, Set.of("disconnected")) + ); + if (withNull) { + expectedResult = new HashMap<>(expectedResult); + expectedResult.put(null, new HashMap<>()); + expectedResult.get(null).put(null, new HashSet<>()); + expectedResult.get(null).get(null).add(null); + expectedResult.get(null).put(43, new HashSet<>()); + expectedResult.get(null).get(43).add(null); + } + assertThat(result, equalTo(expectedResult)); + } +} diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle index eac5d5764d4b..971eed1c6795 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle @@ -27,6 +27,7 @@ restResources { dependencies { javaRestTestImplementation project(xpackModule('esql:qa:testFixtures')) javaRestTestImplementation project(xpackModule('esql:qa:server')) + javaRestTestImplementation project(xpackModule('esql')) } GradleUtils.extendSourceSet(project, "javaRestTest", "yamlRestTest") diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java index 801e1d12b1d4..81070b3155f2 100644 --- a/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java +++ b/x-pack/plugin/esql/qa/server/mixed-cluster/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/mixed/MixedClusterEsqlSpecIT.java @@ -18,8 +18,10 @@ import org.junit.Before; import org.junit.ClassRule; import java.io.IOException; +import java.util.List; import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V4; import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC; public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase { @@ -92,6 +94,11 @@ public class MixedClusterEsqlSpecIT extends 
EsqlSpecTestCase { return false; } + @Override + protected boolean supportsIndexModeLookup() throws IOException { + return hasCapabilities(List.of(JOIN_LOOKUP_V4.capabilityName())); + } + @Override protected boolean deduplicateExactWarnings() { /* diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index e658d169cbce..2ec75683ab14 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -280,4 +280,11 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { protected boolean supportsInferenceTestService() { return false; } + + @Override + protected boolean supportsIndexModeLookup() throws IOException { + // CCS does not yet support JOIN_LOOKUP_V4 and clusters falsely report they have this capability + // return hasCapabilities(List.of(JOIN_LOOKUP_V4.capabilityName())); + return false; + } } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 2484a428c4b0..e7bce73e21cd 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -135,8 +135,8 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { createInferenceEndpoint(client()); } - if (indexExists(availableDatasetsForEs(client()).iterator().next().indexName()) == false) { - loadDataSetIntoEs(client()); + if (indexExists(availableDatasetsForEs(client(), supportsIndexModeLookup()).iterator().next().indexName()) == false) { + loadDataSetIntoEs(client(), supportsIndexModeLookup()); } } @@ -182,12 +182,30 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) throws IOException { - if (testCase.requiredCapabilities.isEmpty()) { + if (hasCapabilities(client, testCase.requiredCapabilities)) { return; } + + var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); + + for (String feature : testCase.requiredCapabilities) { + var esqlFeature = "esql." 
+ feature; + assumeTrue("Requested capability " + feature + " is an ESQL cluster feature", features.contains(esqlFeature)); + assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature(esqlFeature)); + } + } + + protected static boolean hasCapabilities(List requiredCapabilities) throws IOException { + return hasCapabilities(adminClient(), requiredCapabilities); + } + + protected static boolean hasCapabilities(RestClient client, List requiredCapabilities) throws IOException { + if (requiredCapabilities.isEmpty()) { + return true; + } try { - if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) { - return; + if (clusterHasCapability(client, "POST", "/_query", List.of(), requiredCapabilities).orElse(false)) { + return true; } LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features"); } catch (ResponseException e) { @@ -206,20 +224,17 @@ public abstract class EsqlSpecTestCase extends ESRestTestCase { throw e; } } - - var features = new EsqlFeatures().getFeatures().stream().map(NodeFeature::id).collect(Collectors.toSet()); - - for (String feature : testCase.requiredCapabilities) { - var esqlFeature = "esql." + feature; - assumeTrue("Requested capability " + feature + " is an ESQL cluster feature", features.contains(esqlFeature)); - assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature(esqlFeature)); - } + return false; } protected boolean supportsInferenceTestService() { return true; } + protected boolean supportsIndexModeLookup() throws IOException { + return true; + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index 63c184e973cd..588d5870d89e 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -46,7 +46,7 @@ public abstract class GenerativeRestTest extends ESRestTestCase { @Before public void setup() throws IOException { if (indexExists(CSV_DATASET_MAP.keySet().iterator().next()) == false) { - loadDataSetIntoEs(client()); + loadDataSetIntoEs(client(), true); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 34af1edb9f99..dbeb54996733 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -235,7 +235,7 @@ public class CsvTestsDataLoader { } try (RestClient client = builder.build()) { - loadDataSetIntoEs(client, (restClient, indexName, indexMapping, indexSettings) -> { + loadDataSetIntoEs(client, true, (restClient, indexName, indexMapping, indexSettings) -> { // don't use ESRestTestCase methods here or, if you do, test running the main method before making the change StringBuilder jsonBody = new StringBuilder("{"); if 
(indexSettings != null && indexSettings.isEmpty() == false) { @@ -254,26 +254,28 @@ public class CsvTestsDataLoader { } } - public static Set availableDatasetsForEs(RestClient client) throws IOException { + public static Set availableDatasetsForEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { boolean inferenceEnabled = clusterHasInferenceEndpoint(client); return CSV_DATASET_MAP.values() .stream() .filter(d -> d.requiresInferenceEndpoint == false || inferenceEnabled) + .filter(d -> supportsIndexModeLookup || d.indexName.endsWith("_lookup") == false) // TODO: use actual index settings .collect(Collectors.toCollection(HashSet::new)); } - public static void loadDataSetIntoEs(RestClient client) throws IOException { - loadDataSetIntoEs(client, (restClient, indexName, indexMapping, indexSettings) -> { + public static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup) throws IOException { + loadDataSetIntoEs(client, supportsIndexModeLookup, (restClient, indexName, indexMapping, indexSettings) -> { ESRestTestCase.createIndex(restClient, indexName, indexSettings, indexMapping, null); }); } - private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + private static void loadDataSetIntoEs(RestClient client, boolean supportsIndexModeLookup, IndexCreator indexCreator) + throws IOException { Logger logger = LogManager.getLogger(CsvTestsDataLoader.class); Set loadedDatasets = new HashSet<>(); - for (var dataset : availableDatasetsForEs(client)) { + for (var dataset : availableDatasetsForEs(client, supportsIndexModeLookup)) { load(client, dataset, logger, indexCreator); loadedDatasets.add(dataset.indexName); } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec index 4ce43961a707..5ad62dd7a21a 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/categorize.csv-spec @@ -60,6 +60,19 @@ COUNT():long | VALUES(str):keyword | category:keyword 1 | [a, b, c] | .*?disconnected.*? ; +limit before stats +required_capability: categorize_v5 + +FROM sample_data | SORT message | LIMIT 4 + | STATS count=COUNT() BY category=CATEGORIZE(message) + | SORT category +; + +count:long | category:keyword + 3 | .*?Connected.+?to.*? + 1 | .*?Connection.+?error.*? +; + skips stopwords required_capability: categorize_v5 @@ -615,3 +628,159 @@ COUNT():long | x:keyword 3 | [.*?Connection.+?error.*?,.*?Connection.+?error.*?] 1 | [.*?Disconnected.*?,.*?Disconnected.*?] ; + +multiple groupings with categorize and ip +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), client_ip + | SORT category, client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | .*?Connected.+?to.*? | 172.21.2.113 + 1 | .*?Connected.+?to.*? | 172.21.2.162 + 1 | .*?Connected.+?to.*? | 172.21.3.15 + 3 | .*?Connection.+?error.*? | 172.21.3.15 + 1 | .*?Disconnected.*? | 172.21.0.5 +; + +multiple groupings with categorize and bucketed timestamp +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), timestamp=BUCKET(@timestamp, 1 HOUR) + | SORT category, timestamp +; + +count:long | category:keyword | timestamp:datetime + 2 | .*?Connected.+?to.*? | 2023-10-23T12:00:00.000Z + 1 | .*?Connected.+?to.*? 
| 2023-10-23T13:00:00.000Z + 3 | .*?Connection.+?error.*? | 2023-10-23T13:00:00.000Z + 1 | .*?Disconnected.*? | 2023-10-23T13:00:00.000Z +; + + +multiple groupings with categorize and limit before stats +required_capability: categorize_multiple_groupings + +FROM sample_data | SORT message | LIMIT 5 + | STATS count=COUNT() BY category=CATEGORIZE(message), client_ip + | SORT category, client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | .*?Connected.+?to.*? | 172.21.2.113 + 1 | .*?Connected.+?to.*? | 172.21.2.162 + 1 | .*?Connected.+?to.*? | 172.21.3.15 + 2 | .*?Connection.+?error.*? | 172.21.3.15 +; + +multiple groupings with categorize and nulls +required_capability: categorize_multiple_groupings + +FROM employees + | STATS SUM(languages) BY category=CATEGORIZE(job_positions), gender + | SORT category DESC, gender ASC + | LIMIT 5 +; + +SUM(languages):long | category:keyword | gender:keyword + 11 | null | F + 16 | null | M + 14 | .*?Tech.+?Lead.*? | F + 23 | .*?Tech.+?Lead.*? | M + 9 | .*?Tech.+?Lead.*? | null +; + +multiple groupings with categorize and a field that's always null +required_capability: categorize_multiple_groupings + +FROM sample_data + | EVAL nullfield = null + | STATS count=COUNT() BY category=CATEGORIZE(nullfield), client_ip + | SORT client_ip +; + +count:long | category:keyword | client_ip:ip + 1 | null | 172.21.0.5 + 1 | null | 172.21.2.113 + 1 | null | 172.21.2.162 + 4 | null | 172.21.3.15 +; + +multiple groupings with categorize and the same text field +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT() BY category=CATEGORIZE(message), message + | SORT message +; + +count:long | category:keyword | message:keyword + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.1 + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.2 + 1 | .*?Connected.+?to.*? | Connected to 10.1.0.3 + 3 | .*?Connection.+?error.*? | Connection error + 1 | .*?Disconnected.*? | Disconnected +; + +multiple additional complex groupings with categorize +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=COUNT(), duration=SUM(event_duration) BY category=CATEGORIZE(message), SUBSTRING(message, 1, 7), ip_part=TO_LONG(SUBSTRING(TO_STRING(client_ip), 8, 1)), hour=BUCKET(@timestamp, 1 HOUR) + | SORT ip_part, category +; + +count:long | duration:long | category:keyword | SUBSTRING(message, 1, 7):keyword | ip_part:long | hour:datetime + 1 | 1232382 | .*?Disconnected.*? | Disconn | 0 | 2023-10-23T13:00:00.000Z + 2 | 6215122 | .*?Connected.+?to.*? | Connect | 2 | 2023-10-23T12:00:00.000Z + 1 | 1756467 | .*?Connected.+?to.*? | Connect | 3 | 2023-10-23T13:00:00.000Z + 3 | 14027356 | .*?Connection.+?error.*? | Connect | 3 | 2023-10-23T13:00:00.000Z +; + +multiple groupings with categorize and some constants including null +required_capability: categorize_multiple_groupings + +FROM sample_data + | STATS count=MV_COUNT(VALUES(message)) BY category=CATEGORIZE(message), null, constant="constant" + | SORT category +; + +count:integer | category:keyword | null:null | constant:keyword + 3 | .*?Connected.+?to.*? | null | constant + 1 | .*?Connection.+?error.*? | null | constant + 1 | .*?Disconnected.*? 
| null | constant +; + +multiple groupings with categorize and aggregation filters +required_capability: categorize_multiple_groupings + +FROM employees + | STATS lang_low=AVG(languages) WHERE salary<=50000, lang_high=AVG(languages) WHERE salary>50000 BY category=CATEGORIZE(job_positions), gender + | SORT category, gender + | LIMIT 5 +; + +lang_low:double | lang_high:double | category:keyword | gender:keyword + 2.0 | 5.0 | .*?Accountant.*? | F + 3.0 | 2.5 | .*?Accountant.*? | M + 5.0 | 2.0 | .*?Accountant.*? | null + 3.0 | 3.25 | .*?Architect.*? | F + 3.75 | null | .*?Architect.*? | M +; + +multiple groupings with categorize on null row +required_capability: categorize_multiple_groupings + +ROW message = null, str = ["a", "b", "c"] + | STATS COUNT(), VALUES(str) BY category=CATEGORIZE(message), str + | SORT str +; + +COUNT():long | VALUES(str):keyword | category:keyword | str:keyword + 1 | [a, b, c] | null | a + 1 | [a, b, c] | null | b + 1 | [a, b, c] | null | c +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec index d4c7b8c59fdb..cb38204a71ab 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/scoring.csv-spec @@ -283,3 +283,33 @@ book_no:keyword | c_score:double 7350 | 2.0 7140 | 3.0 ; + +QstrScoreManipulation +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("title:rings") +| eval _score = _score + 1 +| keep book_no, title, _score +| limit 2; + +book_no:keyword | title:text | _score:double +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | 2.6404519081115723 +2714 | Return of the King Being the Third Part of The Lord of the Rings | 2.9239964485168457 +; + +QstrScoreOverride +required_capability: metadata_score +required_capability: qstr_function + +from books metadata _score +| where qstr("title:rings") +| eval _score = "foobar" +| keep book_no, title, _score +| limit 2; + +book_no:keyword | title:text | _score:keyword +4023 | A Tolkien Compass: Including J. R. R. Tolkien's Guide to the Names in The Lord of the Rings | foobar +2714 | Return of the King Being the Third Part of The Lord of the Rings | foobar +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java new file mode 100644 index 000000000000..66ac32b33cd4 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEnrichBasedCrossClusterTestCase.java @@ -0,0 +1,290 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
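
The QstrScoreManipulation and QstrScoreOverride specs above only pass because this change also drops the Verifier rule that reserved _score as a METADATA-only attribute (see the Verifier.java hunk later in this diff). Once a query opts in with METADATA _score, EVAL may reassign the column like any other, even to a different type. A combined sketch along the lines of those two specs:

    FROM books METADATA _score
    | WHERE qstr("title:rings")
    | EVAL _score = _score + 1   // arithmetic keeps _score:double
    | EVAL _score = "foobar"     // reassignment turns the column into a keyword
    | KEEP book_no, title, _score
    | LIMIT 2
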
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.ingest.common.IngestCommonPlugin; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.reindex.ReindexPlugin; +import org.elasticsearch.test.AbstractMultiClustersTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; +import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.enrich.EnrichPlugin; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; +import org.junit.After; +import org.junit.Before; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; + +public abstract class AbstractEnrichBasedCrossClusterTestCase extends AbstractMultiClustersTestCase { + + public static String REMOTE_CLUSTER_1 = "c1"; + public static String REMOTE_CLUSTER_2 = "c2"; + + /** + * subclasses should override if they don't want enrich policies wiped after each test method run + */ + protected boolean tolerateErrorsWhenWipingEnrichPolicies() { + return false; + } + + @Override + protected List remoteClusterAlias() { + return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); + } + + protected Collection allClusters() { + return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); + } + + @Override + protected Collection> nodePlugins(String clusterAlias) { + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(CrossClustersEnrichIT.LocalStateEnrich.class); + plugins.add(IngestCommonPlugin.class); + plugins.add(ReindexPlugin.class); + return plugins; + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); + } + + static final EnrichPolicy hostPolicy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); + static final EnrichPolicy vendorPolicy = new EnrichPolicy("match", null, List.of("vendors"), "os", List.of("os", 
"vendor")); + + @Before + public void setupHostsEnrich() { + // the hosts policy are identical on every node + Map allHosts = Map.of( + "192.168.1.2", + "Windows", + "192.168.1.3", + "MacOS", + "192.168.1.4", + "Linux", + "192.168.1.5", + "Android", + "192.168.1.6", + "iOS", + "192.168.1.7", + "Windows", + "192.168.1.8", + "MacOS", + "192.168.1.9", + "Linux", + "192.168.1.10", + "Linux", + "192.168.1.11", + "Windows" + ); + for (String cluster : allClusters()) { + Client client = client(cluster); + client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); + for (Map.Entry h : allHosts.entrySet()) { + client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); + } + client.admin().indices().prepareRefresh("hosts").get(); + client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", hostPolicy)) + .actionGet(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) + .actionGet(); + assertAcked(client.admin().indices().prepareDelete("hosts")); + } + } + + @Before + public void setupVendorPolicy() { + var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); + var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); + var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); + var vendors = Map.of(LOCAL_CLUSTER, localVendors, REMOTE_CLUSTER_1, c1Vendors, REMOTE_CLUSTER_2, c2Vendors); + for (Map.Entry> e : vendors.entrySet()) { + Client client = client(e.getKey()); + client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); + for (Map.Entry v : e.getValue().entrySet()) { + client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); + } + client.admin().indices().prepareRefresh("vendors").get(); + client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", vendorPolicy)) + .actionGet(); + client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) + .actionGet(); + assertAcked(client.admin().indices().prepareDelete("vendors")); + } + } + + @Before + public void setupEventsIndices() { + record Event(long timestamp, String user, String host) { + + } + List e0 = List.of( + new Event(1, "matthew", "192.168.1.3"), + new Event(2, "simon", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "andrew", "192.168.1.7"), + new Event(5, "simon", "192.168.1.20"), + new Event(6, "kevin", "192.168.1.2"), + new Event(7, "akio", "192.168.1.5"), + new Event(8, "luke", "192.168.1.2"), + new Event(9, "jack", "192.168.1.4") + ); + List e1 = List.of( + new Event(1, "andres", "192.168.1.2"), + new Event(2, "sergio", "192.168.1.6"), + new Event(3, "kylian", "192.168.1.8"), + new Event(4, "andrew", "192.168.1.9"), + new Event(5, "jack", "192.168.1.3"), + new Event(6, "kevin", "192.168.1.4"), + new Event(7, "akio", "192.168.1.7"), + new Event(8, "kevin", "192.168.1.21"), + new Event(9, "andres", "192.168.1.8") + ); + List e2 = List.of( + new Event(1, "park", "192.168.1.25"), + new Event(2, "akio", "192.168.1.5"), + new Event(3, "park", "192.168.1.2"), + new Event(4, "kevin", "192.168.1.3") + ); + for (var c : 
Map.of(LOCAL_CLUSTER, e0, REMOTE_CLUSTER_1, e1, REMOTE_CLUSTER_2, e2).entrySet()) { + Client client = client(c.getKey()); + client.admin() + .indices() + .prepareCreate("events") + .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") + .get(); + for (var e : c.getValue()) { + client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); + } + client.admin().indices().prepareRefresh("events").get(); + } + } + + @After + public void wipeEnrichPolicies() { + for (String cluster : allClusters()) { + cluster(cluster).wipe(Set.of()); + for (String policy : List.of("hosts", "vendors")) { + if (tolerateErrorsWhenWipingEnrichPolicies()) { + try { + client(cluster).execute( + DeleteEnrichPolicyAction.INSTANCE, + new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) + ); + } catch (Exception e) { + assertThat(e.getMessage(), containsString("Cluster is already closed")); + } + + } else { + client(cluster).execute( + DeleteEnrichPolicyAction.INSTANCE, + new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) + ); + } + } + } + } + + static String enrichHosts(Enrich.Mode mode) { + return EsqlTestUtils.randomEnrichCommand("hosts", mode, hostPolicy.getMatchField(), hostPolicy.getEnrichFields()); + } + + static String enrichVendors(Enrich.Mode mode) { + return EsqlTestUtils.randomEnrichCommand("vendors", mode, vendorPolicy.getMatchField(), vendorPolicy.getEnrichFields()); + } + + protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { + EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); + request.query(query); + request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); + if (randomBoolean()) { + request.profile(true); + } + if (ccsMetadataInResponse != null) { + request.includeCCSMetadata(ccsMetadataInResponse); + } + return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); + } + + public static Tuple randomIncludeCCSMetadata() { + return switch (randomIntBetween(1, 3)) { + case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); + case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); + case 3 -> new Tuple<>(null, Boolean.FALSE); + default -> throw new AssertionError("should not get here"); + }; + } + + public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { + public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + + plugins.add(new EnrichPlugin(settings) { + @Override + protected XPackLicenseState getLicenseState() { + return this.getLicenseState(); + } + }); + } + + public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { + @Inject + public EnrichTransportXPackInfoAction( + TransportService transportService, + ActionFilters actionFilters, + LicenseService licenseService, + NodeClient client + ) { + super(transportService, actionFilters, licenseService, client); + } + + @Override + protected List> infoActions() { + return Collections.singletonList(XPackInfoFeatureAction.ENRICH); + } + } + + @Override + protected Class> getInfoAction() { + return CrossClustersQueriesWithInvalidLicenseIT.LocalStateEnrich.EnrichTransportXPackInfoAction.class; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java index 
c8206621de41..a2bba19db50f 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterAsyncQueryIT.java @@ -35,7 +35,6 @@ import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; import java.io.IOException; @@ -78,7 +77,7 @@ public class CrossClusterAsyncQueryIT extends AbstractMultiClustersTestCase { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); // allows the async_search DELETE action plugins.add(InternalExchangePlugin.class); plugins.add(PauseFieldPlugin.class); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java index 5c3e1974e924..09ad97b08f35 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterEnrichUnavailableClustersIT.java @@ -8,36 +8,21 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Tuple; -import org.elasticsearch.ingest.common.IngestCommonPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.transport.RemoteClusterAware; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; -import org.junit.Before; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; -import static org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT.enrichHosts; -import static org.elasticsearch.xpack.esql.action.CrossClustersEnrichIT.enrichVendors; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -47,151 +32,26 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; * This IT test is the dual of CrossClustersEnrichIT, which tests 
"happy path" * and this one tests unavailable cluster scenarios using (most of) the same tests. */ -public class CrossClusterEnrichUnavailableClustersIT extends AbstractMultiClustersTestCase { - - public static String REMOTE_CLUSTER_1 = "c1"; - public static String REMOTE_CLUSTER_2 = "c2"; - - @Override - protected List remoteClusterAlias() { - return List.of(REMOTE_CLUSTER_1, REMOTE_CLUSTER_2); - } +public class CrossClusterEnrichUnavailableClustersIT extends AbstractEnrichBasedCrossClusterTestCase { @Override protected boolean reuseClusters() { return false; } - private Collection allClusters() { - return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); + @Override + protected boolean tolerateErrorsWhenWipingEnrichPolicies() { + // attempt to wipe will fail since some clusters are already closed + return true; } @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(CrossClustersEnrichIT.LocalStateEnrich.class); - plugins.add(IngestCommonPlugin.class); - plugins.add(ReindexPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); return plugins; } - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); - } - - @Before - public void setupHostsEnrich() { - // the hosts policy are identical on every node - Map allHosts = Map.of( - "192.168.1.2", - "Windows", - "192.168.1.3", - "MacOS", - "192.168.1.4", - "Linux", - "192.168.1.5", - "Android", - "192.168.1.6", - "iOS", - "192.168.1.7", - "Windows", - "192.168.1.8", - "MacOS", - "192.168.1.9", - "Linux", - "192.168.1.10", - "Linux", - "192.168.1.11", - "Windows" - ); - for (String cluster : allClusters()) { - Client client = client(cluster); - client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); - for (Map.Entry h : allHosts.entrySet()) { - client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); - } - client.admin().indices().prepareRefresh("hosts").get(); - client.execute( - PutEnrichPolicyAction.INSTANCE, - new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", CrossClustersEnrichIT.hostPolicy) - ).actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("hosts")); - } - } - - @Before - public void setupVendorPolicy() { - var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); - var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); - var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); - var vendors = Map.of(LOCAL_CLUSTER, localVendors, "c1", c1Vendors, "c2", c2Vendors); - for (Map.Entry> e : vendors.entrySet()) { - Client client = client(e.getKey()); - client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); - for (Map.Entry v : e.getValue().entrySet()) { - client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); - } - client.admin().indices().prepareRefresh("vendors").get(); - client.execute( - PutEnrichPolicyAction.INSTANCE, - new 
PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", CrossClustersEnrichIT.vendorPolicy) - ).actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("vendors")); - } - } - - @Before - public void setupEventsIndices() { - record Event(long timestamp, String user, String host) {} - - List e0 = List.of( - new Event(1, "matthew", "192.168.1.3"), - new Event(2, "simon", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "andrew", "192.168.1.7"), - new Event(5, "simon", "192.168.1.20"), - new Event(6, "kevin", "192.168.1.2"), - new Event(7, "akio", "192.168.1.5"), - new Event(8, "luke", "192.168.1.2"), - new Event(9, "jack", "192.168.1.4") - ); - List e1 = List.of( - new Event(1, "andres", "192.168.1.2"), - new Event(2, "sergio", "192.168.1.6"), - new Event(3, "kylian", "192.168.1.8"), - new Event(4, "andrew", "192.168.1.9"), - new Event(5, "jack", "192.168.1.3"), - new Event(6, "kevin", "192.168.1.4"), - new Event(7, "akio", "192.168.1.7"), - new Event(8, "kevin", "192.168.1.21"), - new Event(9, "andres", "192.168.1.8") - ); - List e2 = List.of( - new Event(1, "park", "192.168.1.25"), - new Event(2, "akio", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "kevin", "192.168.1.3") - ); - for (var c : Map.of(LOCAL_CLUSTER, e0, "c1", e1, "c2", e2).entrySet()) { - Client client = client(c.getKey()); - client.admin() - .indices() - .prepareCreate("events") - .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") - .get(); - for (var e : c.getValue()) { - client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); - } - client.admin().indices().prepareRefresh("events").get(); - } - } - public void testEnrichWithHostsPolicyAndDisconnectedRemotesWithSkipUnavailableTrue() throws IOException { setSkipUnavailable(REMOTE_CLUSTER_1, true); setSkipUnavailable(REMOTE_CLUSTER_2, true); @@ -645,19 +505,6 @@ public class CrossClusterEnrichUnavailableClustersIT extends AbstractMultiCluste } } - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - if (randomBoolean()) { - request.profile(true); - } - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInfo) { assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertTrue(executionInfo.isCrossClusterSearch()); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java index d1c9b5cfb2ac..f65764daafb8 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClusterQueryUnavailableRemotesIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.plugins.Plugin; import 
org.elasticsearch.test.AbstractMultiClustersTestCase; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.io.IOException; import java.util.ArrayList; @@ -54,8 +53,8 @@ public class CrossClusterQueryUnavailableRemotesIT extends AbstractMultiClusters @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(org.elasticsearch.xpack.esql.action.CrossClustersQueryIT.InternalExchangePlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); + plugins.add(CrossClustersQueryIT.InternalExchangePlugin.class); return plugins; } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java index 5291ad3b0d03..17f5f8148665 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersCancellationIT.java @@ -33,7 +33,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.plugin.ComputeService; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.Before; import java.util.ArrayList; @@ -62,7 +61,7 @@ public class CrossClustersCancellationIT extends AbstractMultiClustersTestCase { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(InternalExchangePlugin.class); plugins.add(PauseFieldPlugin.class); return plugins; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 57f85751999a..4e6be6cc2bf7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -7,218 +7,34 @@ package org.elasticsearch.xpack.esql.action; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Tuple; -import org.elasticsearch.ingest.common.IngestCommonPlugin; -import org.elasticsearch.injection.guice.Inject; -import org.elasticsearch.license.LicenseService; -import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.protocol.xpack.XPackInfoRequest; -import org.elasticsearch.protocol.xpack.XPackInfoResponse; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.test.AbstractMultiClustersTestCase; 
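
The wholesale import removals here are the visible half of the refactor: the hosts/vendors/events fixtures, the enrich-policy lifecycle, runQuery and randomIncludeCCSMetadata all now live in AbstractEnrichBasedCrossClusterTestCase, so each IT only declares what actually differs. A subclass reduces to roughly the following (the class name is hypothetical):

    public class MyEnrichCcsIT extends AbstractEnrichBasedCrossClusterTestCase {
        @Override
        protected Collection<Class<? extends Plugin>> nodePlugins(String clusterAlias) {
            List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins(clusterAlias));
            plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); // or the non-enterprise variant
            return plugins;
        }

        @Override
        protected boolean tolerateErrorsWhenWipingEnrichPolicies() {
            return true; // only for tests that deliberately close remote clusters mid-run
        }
    }
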
-import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; -import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.core.action.TransportXPackInfoAction; -import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; -import org.elasticsearch.xpack.core.action.XPackInfoFeatureResponse; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.core.enrich.action.DeleteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; -import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; -import org.elasticsearch.xpack.enrich.EnrichPlugin; -import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.plan.logical.Enrich; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; -import org.junit.After; -import org.junit.Before; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Set; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { - - @Override - protected List remoteClusterAlias() { - return List.of("c1", "c2"); - } - - protected Collection allClusters() { - return CollectionUtils.appendToCopy(remoteClusterAlias(), LOCAL_CLUSTER); - } +public class CrossClustersEnrichIT extends AbstractEnrichBasedCrossClusterTestCase { @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); - plugins.add(LocalStateEnrich.class); - plugins.add(IngestCommonPlugin.class); - plugins.add(ReindexPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); return plugins; } - @Override - protected Settings nodeSettings() { - return Settings.builder().put(super.nodeSettings()).put(XPackSettings.SECURITY_ENABLED.getKey(), false).build(); - } - - static final EnrichPolicy hostPolicy = new EnrichPolicy("match", null, List.of("hosts"), "ip", List.of("ip", "os")); - static final EnrichPolicy vendorPolicy = new EnrichPolicy("match", null, List.of("vendors"), "os", List.of("os", "vendor")); - - @Before - public void setupHostsEnrich() { - // the hosts policy are identical on every node - Map allHosts = Map.of( - "192.168.1.2", - "Windows", - "192.168.1.3", - "MacOS", - "192.168.1.4", - "Linux", - "192.168.1.5", - "Android", - "192.168.1.6", - "iOS", - "192.168.1.7", - "Windows", - "192.168.1.8", - "MacOS", - "192.168.1.9", - "Linux", - "192.168.1.10", - "Linux", - "192.168.1.11", - "Windows" - ); - for (String cluster : allClusters()) { - Client client = client(cluster); - client.admin().indices().prepareCreate("hosts").setMapping("ip", "type=ip", "os", "type=keyword").get(); - for (Map.Entry h : allHosts.entrySet()) { - client.prepareIndex("hosts").setSource("ip", h.getKey(), "os", h.getValue()).get(); - } - 
client.admin().indices().prepareRefresh("hosts").get(); - client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts", hostPolicy)) - .actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "hosts")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("hosts")); - } - } - - @Before - public void setupVendorPolicy() { - var localVendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Samsung", "Linux", "Redhat"); - var c1Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Google", "Linux", "Suse"); - var c2Vendors = Map.of("Windows", "Microsoft", "MacOS", "Apple", "iOS", "Apple", "Android", "Sony", "Linux", "Ubuntu"); - var vendors = Map.of(LOCAL_CLUSTER, localVendors, "c1", c1Vendors, "c2", c2Vendors); - for (Map.Entry> e : vendors.entrySet()) { - Client client = client(e.getKey()); - client.admin().indices().prepareCreate("vendors").setMapping("os", "type=keyword", "vendor", "type=keyword").get(); - for (Map.Entry v : e.getValue().entrySet()) { - client.prepareIndex("vendors").setSource("os", v.getKey(), "vendor", v.getValue()).get(); - } - client.admin().indices().prepareRefresh("vendors").get(); - client.execute(PutEnrichPolicyAction.INSTANCE, new PutEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors", vendorPolicy)) - .actionGet(); - client.execute(ExecuteEnrichPolicyAction.INSTANCE, new ExecuteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, "vendors")) - .actionGet(); - assertAcked(client.admin().indices().prepareDelete("vendors")); - } - } - - @Before - public void setupEventsIndices() { - record Event(long timestamp, String user, String host) { - - } - List e0 = List.of( - new Event(1, "matthew", "192.168.1.3"), - new Event(2, "simon", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "andrew", "192.168.1.7"), - new Event(5, "simon", "192.168.1.20"), - new Event(6, "kevin", "192.168.1.2"), - new Event(7, "akio", "192.168.1.5"), - new Event(8, "luke", "192.168.1.2"), - new Event(9, "jack", "192.168.1.4") - ); - List e1 = List.of( - new Event(1, "andres", "192.168.1.2"), - new Event(2, "sergio", "192.168.1.6"), - new Event(3, "kylian", "192.168.1.8"), - new Event(4, "andrew", "192.168.1.9"), - new Event(5, "jack", "192.168.1.3"), - new Event(6, "kevin", "192.168.1.4"), - new Event(7, "akio", "192.168.1.7"), - new Event(8, "kevin", "192.168.1.21"), - new Event(9, "andres", "192.168.1.8") - ); - List e2 = List.of( - new Event(1, "park", "192.168.1.25"), - new Event(2, "akio", "192.168.1.5"), - new Event(3, "park", "192.168.1.2"), - new Event(4, "kevin", "192.168.1.3") - ); - for (var c : Map.of(LOCAL_CLUSTER, e0, "c1", e1, "c2", e2).entrySet()) { - Client client = client(c.getKey()); - client.admin() - .indices() - .prepareCreate("events") - .setMapping("timestamp", "type=long", "user", "type=keyword", "host", "type=ip") - .get(); - for (var e : c.getValue()) { - client.prepareIndex("events").setSource("timestamp", e.timestamp, "user", e.user, "host", e.host).get(); - } - client.admin().indices().prepareRefresh("events").get(); - } - } - - @After - public void wipeEnrichPolicies() { - for (String cluster : allClusters()) { - cluster(cluster).wipe(Set.of()); - for (String policy : List.of("hosts", "vendors")) { - client(cluster).execute( - DeleteEnrichPolicyAction.INSTANCE, - new DeleteEnrichPolicyAction.Request(TEST_REQUEST_TIMEOUT, policy) - ); - } - } - } 
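
enrichHosts and enrichVendors, removed just below, are now inherited from the base class; both delegate to EsqlTestUtils.randomEnrichCommand, which renders an ENRICH command for the requested Enrich.Mode. In ES|QL surface syntax the mode is a prefix on the policy name, roughly:

    FROM events
    | EVAL ip = TO_STR(host)
    | ENRICH _coordinator:hosts ON ip WITH os   // also _any: (the default) or _remote:
    | STATS c = COUNT(*) BY os
    | SORT os
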
- - static String enrichHosts(Enrich.Mode mode) { - return EsqlTestUtils.randomEnrichCommand("hosts", mode, hostPolicy.getMatchField(), hostPolicy.getEnrichFields()); - } - - static String enrichVendors(Enrich.Mode mode) { - return EsqlTestUtils.randomEnrichCommand("vendors", mode, vendorPolicy.getMatchField(), vendorPolicy.getEnrichFields()); - } - public void testWithHostsPolicy() { for (var mode : Enrich.Mode.values()) { String query = "FROM events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; @@ -606,19 +422,6 @@ public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { ); } - protected EsqlQueryResponse runQuery(String query, Boolean ccsMetadataInResponse) { - EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query(query); - request.pragmas(AbstractEsqlIntegTestCase.randomPragmas()); - if (randomBoolean()) { - request.profile(true); - } - if (ccsMetadataInResponse != null) { - request.includeCCSMetadata(ccsMetadataInResponse); - } - return client(LOCAL_CLUSTER).execute(EsqlQueryAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); - } - private static void assertCCSExecutionInfoDetails(EsqlExecutionInfo executionInfo) { assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); assertTrue(executionInfo.isCrossClusterSearch()); @@ -637,49 +440,4 @@ public class CrossClustersEnrichIT extends AbstractMultiClustersTestCase { assertThat(cluster.getFailedShards(), equalTo(0)); } } - - public static Tuple randomIncludeCCSMetadata() { - return switch (randomIntBetween(1, 3)) { - case 1 -> new Tuple<>(Boolean.TRUE, Boolean.TRUE); - case 2 -> new Tuple<>(Boolean.FALSE, Boolean.FALSE); - case 3 -> new Tuple<>(null, Boolean.FALSE); - default -> throw new AssertionError("should not get here"); - }; - } - - public static class LocalStateEnrich extends LocalStateCompositeXPackPlugin { - - public LocalStateEnrich(final Settings settings, final Path configPath) throws Exception { - super(settings, configPath); - - plugins.add(new EnrichPlugin(settings) { - @Override - protected XPackLicenseState getLicenseState() { - return this.getLicenseState(); - } - }); - } - - public static class EnrichTransportXPackInfoAction extends TransportXPackInfoAction { - @Inject - public EnrichTransportXPackInfoAction( - TransportService transportService, - ActionFilters actionFilters, - LicenseService licenseService, - NodeClient client - ) { - super(transportService, actionFilters, licenseService, client); - } - - @Override - protected List> infoActions() { - return Collections.singletonList(XPackInfoFeatureAction.ENRICH); - } - } - - @Override - protected Class> getInfoAction() { - return EnrichTransportXPackInfoAction.class; - } - } } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java new file mode 100644 index 000000000000..1ed42b696d65 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueriesWithInvalidLicenseIT.java @@ -0,0 +1,203 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
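
Throughout the license tests that follow, randomIncludeCCSMetadata (inherited from the base class above) yields a (request flag, expected response flag) pair: (true, true), (false, false), or (null, false), the last covering requests that leave the flag unset so the response default applies. The tests thread it as:

    Tuple<Boolean, Boolean> includeCCSMetadata = randomIncludeCCSMetadata();
    Boolean requestIncludeMeta = includeCCSMetadata.v1();  // null means: don't set the flag on the request
    boolean responseExpectMeta = includeCCSMetadata.v2();  // what EsqlExecutionInfo.includeCCSMetadata() should report
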
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +public class CrossClustersQueriesWithInvalidLicenseIT extends AbstractEnrichBasedCrossClusterTestCase { + + private static final String LICENSE_ERROR_MESSAGE = "A valid Enterprise license is required to run ES|QL cross-cluster searches."; + + @Override + protected Collection> nodePlugins(String clusterAlias) { + List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); + plugins.add(EsqlPluginWithNonEnterpriseOrExpiredLicense.class); // key plugin for the test + return plugins; + } + + public void testBasicCrossClusterQuery() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM *,*:* | LIMIT 5", requestIncludeMeta) + ); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testMetadataCrossClusterQuery() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM events,*:* METADATA _index | SORT _index", requestIncludeMeta) + ); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testQueryAgainstNonMatchingClusterWildcardPattern() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + // since this wildcarded expression does not resolve to a valid remote cluster, it is not considered + // a cross-cluster search and thus should not throw a license error + String q = "FROM xremote*:events"; + { + String limit1 = q + " | STATS count(*)"; + try (EsqlQueryResponse resp = runQuery(limit1, requestIncludeMeta)) { + assertThat(resp.columns().size(), equalTo(1)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + } + + String limit0 = q + " | LIMIT 0"; + try (EsqlQueryResponse resp = runQuery(limit0, requestIncludeMeta)) { + assertThat(resp.columns().size(), equalTo(1)); + assertThat(getValuesList(resp).size(), equalTo(0)); + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + } + } + } + + public void testCCSWithLimit0() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + + // local only query does not need a valid Enterprise or Trial license + try (EsqlQueryResponse resp = runQuery("FROM 
events | LIMIT 0", requestIncludeMeta)) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + assertNotNull(executionInfo); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + } + + // cross-cluster searches should fail with license error + String q = randomFrom("FROM events,c1:* | LIMIT 0", "FROM c1:* | LIMIT 0"); + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> runQuery(q, requestIncludeMeta)); + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testSearchesWhereNonExistentClusterIsSpecified() { + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + boolean responseExpectMeta = includeCCSMetadata.v2(); + + // this one query should be allowed since x* does not resolve to any known remote cluster + try (EsqlQueryResponse resp = runQuery("FROM events,x*:no_such_index* | STATS count(*)", requestIncludeMeta)) { + EsqlExecutionInfo executionInfo = resp.getExecutionInfo(); + List> values = getValuesList(resp); + assertThat(values, hasSize(1)); + + assertNotNull(executionInfo); + assertThat(executionInfo.clusterAliases(), equalTo(Set.of(LOCAL_CLUSTER))); + assertThat(executionInfo.isCrossClusterSearch(), is(false)); + assertThat(executionInfo.includeCCSMetadata(), equalTo(responseExpectMeta)); + // since this not a CCS, only the overall took time in the EsqlExecutionInfo matters + assertThat(executionInfo.overallTook().millis(), greaterThanOrEqualTo(0L)); + } + + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> runQuery("FROM events,no_such_cluster:no_such_index* | STATS count(*)", requestIncludeMeta) + ); + // with a valid license this would throw "no such remote cluster" exception, but without a valid license, should get a license error + assertThat(e.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testEnrichWithHostsPolicy() { + // local-only queries do not need an Enterprise or Trial license + for (var mode : Enrich.Mode.values()) { + String query = "FROM events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + try (EsqlQueryResponse resp = runQuery(query, null)) { + List> rows = getValuesList(resp); + assertThat( + rows, + equalTo( + List.of( + List.of(2L, "Android"), + List.of(1L, "Linux"), + List.of(1L, "MacOS"), + List.of(4L, "Windows"), + Arrays.asList(1L, (String) null) + ) + ) + ); + assertFalse(resp.getExecutionInfo().isCrossClusterSearch()); + } + } + + // cross-cluster query should fail due to not having valid Enterprise or Trial license + Tuple includeCCSMetadata = randomIncludeCCSMetadata(); + Boolean requestIncludeMeta = includeCCSMetadata.v1(); + + for (var mode : Enrich.Mode.values()) { + String query = "FROM *:events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, requestIncludeMeta)); + assertThat(e.getMessage(), containsString("A valid Enterprise license is required to run ES|QL cross-cluster searches.")); + } + + for (var mode : Enrich.Mode.values()) { + String query = "FROM *:events,events | eval ip= TO_STR(host) | " + enrichHosts(mode) + " | stats c = COUNT(*) by os | SORT os"; + ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> 
runQuery(query, requestIncludeMeta)); + assertThat(e.getMessage(), containsString("A valid Enterprise license is required to run ES|QL cross-cluster searches.")); + } + } + + public void testAggThenEnrichRemote() { + String query = String.format(Locale.ROOT, """ + FROM *:events,events + | eval ip= TO_STR(host) + | %s + | stats c = COUNT(*) by os + | %s + | sort vendor + """, enrichHosts(Enrich.Mode.ANY), enrichVendors(Enrich.Mode.REMOTE)); + var error = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, randomBoolean()).close()); + // with a valid license this would fail with "ENRICH with remote policy can't be executed after STATS", so ensure here + // that the license error is detected first and returned rather than a VerificationException + assertThat(error.getMessage(), containsString(LICENSE_ERROR_MESSAGE)); + } + + public void testEnrichCoordinatorThenEnrichRemote() { + String query = String.format(Locale.ROOT, """ + FROM *:events,events + | eval ip= TO_STR(host) + | %s + | %s + | sort vendor + """, enrichHosts(Enrich.Mode.COORDINATOR), enrichVendors(Enrich.Mode.REMOTE)); + var error = expectThrows(ElasticsearchStatusException.class, () -> runQuery(query, randomBoolean()).close()); + assertThat( + error.getMessage(), + // with a valid license the error is "ENRICH with remote policy can't be executed after another ENRICH with coordinator policy", + // so ensure here that the license error is detected first and returned rather than a VerificationException + containsString(LICENSE_ERROR_MESSAGE) + ); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java index 46bbad5551e6..347ef419cab9 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersQueryIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.io.IOException; @@ -73,13 +72,13 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { @Override protected Map skipUnavailableForRemoteClusters() { - return Map.of(REMOTE_CLUSTER_1, randomBoolean()); + return Map.of(REMOTE_CLUSTER_1, randomBoolean(), REMOTE_CLUSTER_2, randomBoolean()); } @Override protected Collection> nodePlugins(String clusterAlias) { List> plugins = new ArrayList<>(super.nodePlugins(clusterAlias)); - plugins.add(EsqlPlugin.class); + plugins.add(EsqlPluginWithEnterpriseOrTrialLicense.class); plugins.add(InternalExchangePlugin.class); return plugins; } @@ -184,7 +183,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { } public void testSearchesAgainstNonMatchingIndicesWithLocalOnly() { - Map testClusterInfo = setupClusters(2); + Map testClusterInfo = setupTwoClusters(); String localIndex = (String) testClusterInfo.get("local.index"); { @@ -905,7 +904,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { // cluster-foo* matches nothing and so should not be present in the EsqlExecutionInfo try ( EsqlQueryResponse resp = 
runQuery( - "from logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | stats sum (v)", + "FROM logs-*,no_such_index*,cluster-a:no_such_index*,cluster-foo*:* | STATS sum (v)", requestIncludeMeta ) ) { @@ -1009,7 +1008,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { try ( EsqlQueryResponse resp = runQuery( - "FROM logs*,*:logs* METADATA _index | stats sum(v) by _index | sort _index", + Strings.format("FROM logs*,%s:logs* METADATA _index | stats sum(v) by _index | sort _index", REMOTE_CLUSTER_1), requestIncludeMeta ) ) { @@ -1091,7 +1090,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { final int remoteOnlyProfiles; { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM *:logs* | stats sum(v)"); + request.query("FROM c*:logs* | stats sum(v)"); request.pragmas(pragmas); request.profile(true); try (EsqlQueryResponse resp = runQuery(request)) { @@ -1124,7 +1123,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { final int allProfiles; { EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM logs*,*:logs* | stats total = sum(v)"); + request.query("FROM logs*,c*:logs* | stats total = sum(v)"); request.pragmas(pragmas); request.profile(true); try (EsqlQueryResponse resp = runQuery(request)) { @@ -1169,7 +1168,7 @@ public class CrossClustersQueryIT extends AbstractMultiClustersTestCase { int remoteNumShards = (Integer) testClusterInfo.get("remote.num_shards"); EsqlQueryRequest request = EsqlQueryRequest.syncEsqlQueryRequest(); - request.query("FROM logs*,*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); + request.query("FROM logs*,c*:logs* | EVAL ip = to_ip(id) | STATS total = sum(v) by ip | LIMIT 10"); InternalTestCluster cluster = cluster(LOCAL_CLUSTER); String node = randomFrom(cluster.getNodeNames()); CountDownLatch latch = new CountDownLatch(1); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java new file mode 100644 index 000000000000..34d09fc54157 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithEnterpriseOrTrialLicense.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; + +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * In IT tests, use this instead of the EsqlPlugin in order to use ES|QL features + * that require an Enterprise (or Trial) license.
+ */ +public class EsqlPluginWithEnterpriseOrTrialLicense extends EsqlPlugin { + protected XPackLicenseState getLicenseState() { + License.OperationMode operationMode = randomFrom(License.OperationMode.ENTERPRISE, License.OperationMode.TRIAL); + return new XPackLicenseState(() -> System.currentTimeMillis(), new XPackLicenseStatus(operationMode, true, "Test license expired")); + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java new file mode 100644 index 000000000000..46c3f3f6204c --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlPluginWithNonEnterpriseOrExpiredLicense.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; + +import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * In IT tests, use this instead of the EsqlPlugin in order to test ES|QL features + * using either a: + * - an active (non-expired) basic, standard, missing, gold or platinum Elasticsearch license, OR + * - an expired enterprise or trial license + */ +public class EsqlPluginWithNonEnterpriseOrExpiredLicense extends EsqlPlugin { + protected XPackLicenseState getLicenseState() { + License.OperationMode operationMode; + boolean active; + if (randomBoolean()) { + operationMode = randomFrom( + License.OperationMode.PLATINUM, + License.OperationMode.GOLD, + License.OperationMode.BASIC, + License.OperationMode.MISSING, + License.OperationMode.STANDARD + ); + active = true; + } else { + operationMode = randomFrom(License.OperationMode.ENTERPRISE, License.OperationMode.TRIAL); + active = false; // expired + } + + return new XPackLicenseState( + () -> System.currentTimeMillis(), + new XPackLicenseStatus(operationMode, active, "Test license expired") + ); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 1aee1df3dbaf..6436e049c7dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -419,6 +419,10 @@ public class EsqlCapabilities { */ CATEGORIZE_V5, + /** + * Support for multiple groupings in "CATEGORIZE". 
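
This capability pairs with the relaxed Verifier rule further down: CATEGORIZE may now share a STATS ... BY with other grouping expressions, but only as the first one. Under this change:

    FROM sample_data
    | STATS count = COUNT() BY category = CATEGORIZE(message), client_ip
    // accepted: CATEGORIZE is the first grouping

    FROM sample_data
    | STATS count = COUNT() BY client_ip, CATEGORIZE(message)
    // rejected: "CATEGORIZE grouping function [CATEGORIZE(message)] can only be in the first grouping expression"
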
+ */ + CATEGORIZE_MULTIPLE_GROUPINGS, /** * QSTR function */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index a0728c9a9108..f01cc265e330 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; @@ -208,7 +207,6 @@ public class Verifier { checkJoin(p, failures); }); checkRemoteEnrich(plan, failures); - checkMetadataScoreNameReserved(plan, failures); if (failures.isEmpty()) { checkLicense(plan, licenseState, failures); @@ -222,13 +220,6 @@ public class Verifier { return failures; } - private static void checkMetadataScoreNameReserved(LogicalPlan p, Set<Failure> failures) { - // _score can only be set as metadata attribute - if (p.inputSet().stream().anyMatch(a -> MetadataAttribute.SCORE.equals(a.name()) && (a instanceof MetadataAttribute) == false)) { - failures.add(fail(p, "`" + MetadataAttribute.SCORE + "` is a reserved METADATA attribute")); - } - } - private void checkSort(LogicalPlan p, Set<Failure> failures) { if (p instanceof OrderBy ob) { ob.order().forEach(o -> { @@ -325,11 +316,15 @@ public class Verifier { private static void checkCategorizeGrouping(Aggregate agg, Set<Failure> failures) { // Forbid CATEGORIZE grouping function with other groupings if (agg.groupings().size() > 1) { - agg.groupings().forEach(g -> { + agg.groupings().subList(1, agg.groupings().size()).forEach(g -> { g.forEachDown( Categorize.class, categorize -> failures.add( - fail(categorize, "cannot use CATEGORIZE grouping function [{}] with multiple groupings", categorize.sourceText()) + fail( + categorize, + "CATEGORIZE grouping function [{}] can only be in the first grouping expression", + categorize.sourceText() + ) ) ); }); @@ -610,6 +605,10 @@ public class Verifier { functions.forEach(f -> metrics.incFunctionMetric(f)); } + public XPackLicenseState licenseState() { + return licenseState; + } + /** * Limit QL's comparisons to types we support. This should agree with * {@link EsqlBinaryComparison}'s checkCompatibility method diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java index ded913a78bdf..a100dd64915f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/grouping/Categorize.java @@ -95,7 +95,8 @@ public class Categorize extends GroupingFunction implements Validatable { @Override public Nullability nullable() { - // Both nulls and empty strings result in null values + // Null strings and strings that don't produce tokens after analysis lead to null values.
+ // This includes empty strings, whitespace-only strings, (hexa)decimal numbers and stopwords. return Nullability.TRUE; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java index 20528f8dc282..9132cf87541b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalVerifier.java @@ -12,7 +12,9 @@ import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.optimizer.rules.PlanConsistencyChecker; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; @@ -35,6 +37,12 @@ public final class PhysicalVerifier { Set<Failure> failures = new LinkedHashSet<>(); Failures depFailures = new Failures(); + // AwaitsFix https://github.com/elastic/elasticsearch/issues/118531 + var enriches = plan.collectFirstChildren(EnrichExec.class::isInstance); + if (enriches.isEmpty() == false && ((EnrichExec) enriches.get(0)).mode() == Enrich.Mode.REMOTE) { + return failures; + } + plan.forEachDown(p -> { if (p instanceof AggregateExec agg) { var exclude = Expressions.references(agg.ordinalAttributes()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java new file mode 100644 index 000000000000..0a52ee75de3b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlLicenseChecker.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.session; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestStatus; + +public class EsqlLicenseChecker { + + public static final LicensedFeature.Momentary CCS_FEATURE = LicensedFeature.momentary( + null, + "esql-ccs", + License.OperationMode.ENTERPRISE + ); + + /** + * Only call this method once you know the user is doing a cross-cluster query, as it will update + * the license_usage timestamp for the esql-ccs feature if the license is Enterprise (or Trial). + * @param licenseState the license state of the querying cluster + * @return true if the user has a license that allows ESQL CCS. + */ + public static boolean isCcsAllowed(XPackLicenseState licenseState) { + if (licenseState == null) { + return false; + } + return CCS_FEATURE.check(licenseState); + } + + /** + * @param licenseState existing license state, used to extract info about the currently installed license.
+ * @return ElasticsearchStatusException with an error message informing the caller what license is needed + * to run ES|QL cross-cluster searches and what license (if any) was found. + */ + public static ElasticsearchStatusException invalidLicenseForCcsException(XPackLicenseState licenseState) { + String message = "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: "; + if (licenseState == null) { + message += "none"; + } else { + message += licenseState.statusDescription(); + } + return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 4f7c620bc8d1..83480f6651ab 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -298,6 +298,9 @@ public class EsqlSession { .map(e -> new EnrichPolicyResolver.UnresolvedPolicy((String) e.policyName().fold(), e.mode())) .collect(Collectors.toSet()); final List<TableInfo> indices = preAnalysis.indices; + + EsqlSessionCCSUtils.checkForCcsLicense(indices, indicesExpressionGrouper, verifier.licenseState()); + // TODO: make a separate call for lookup indices final Set<String> targetClusters = enrichPolicyResolver.groupIndicesPerCluster( indices.stream().flatMap(t -> Arrays.stream(Strings.commaDelimitedListToStringArray(t.id().index()))).toArray(String[]::new) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java index 4fe2fef7e3f4..662572c46651 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtils.java @@ -9,17 +9,24 @@ package org.elasticsearch.xpack.esql.session; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.NoSuchRemoteClusterException; import org.elasticsearch.transport.RemoteClusterAware; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.analysis.Analyzer; +import org.elasticsearch.xpack.esql.analysis.TableInfo; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; @@ -255,6 +262,9 @@ class EsqlSessionCCSUtils { } private static boolean concreteIndexRequested(String indexExpression) { + if (Strings.isNullOrBlank(indexExpression)) { + return false; + } for (String expr :
indexExpression.split(",")) { if (expr.charAt(0) == '<' || expr.startsWith("-<")) { // skip date math expressions @@ -288,4 +298,37 @@ class EsqlSessionCCSUtils { } } } + + /** + * Checks the index expression for the presence of remote clusters. If found, it will ensure that the caller + * has a valid Enterprise (or Trial) license on the querying cluster. + * @param indices index expression requested by user + * @param indicesGrouper grouper of index expressions by cluster alias + * @param licenseState license state on the querying cluster + * @throws org.elasticsearch.ElasticsearchStatusException if the license is not valid (or present) for ES|QL CCS search. + */ + public static void checkForCcsLicense( + List<TableInfo> indices, + IndicesExpressionGrouper indicesGrouper, + XPackLicenseState licenseState + ) { + for (TableInfo tableInfo : indices) { + Map<String, OriginalIndices> groupedIndices; + try { + groupedIndices = indicesGrouper.groupIndices(IndicesOptions.DEFAULT, tableInfo.id().index()); + } catch (NoSuchRemoteClusterException e) { + if (EsqlLicenseChecker.isCcsAllowed(licenseState)) { + throw e; + } else { + throw EsqlLicenseChecker.invalidLicenseForCcsException(licenseState); + } + } + // check if it is a cross-cluster query + if (groupedIndices.size() > 1 || groupedIndices.containsKey(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY) == false) { + if (EsqlLicenseChecker.isCcsAllowed(licenseState) == false) { + throw EsqlLicenseChecker.invalidLicenseForCcsException(licenseState); + } + } + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index d58d233168e2..e20f0d8bbc8f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.core.type.InvalidMappedField; @@ -22,7 +21,6 @@ import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.QueryParam; import org.elasticsearch.xpack.esql.parser.QueryParams; -import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import java.util.ArrayList; import java.util.LinkedHashMap; @@ -1805,29 +1803,6 @@ public class VerifierTests extends ESTestCase { ); } - public void testNonMetadataScore() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); - assertEquals("1:12: `_score` is a reserved METADATA attribute", error("from foo | eval _score = 10")); - - assertEquals( - "1:48: `_score` is a reserved METADATA attribute", - error("from foo metadata _score | where qstr(\"bar\") | eval _score = _score + 1") - ); - } - - public void testScoreRenaming() { - assumeTrue("'METADATA _score' is disabled", EsqlCapabilities.Cap.METADATA_SCORE.isEnabled()); - assertEquals("1:33: `_score` is a reserved METADATA attribute", error("from foo METADATA _id, _score | rename _id as _score")); - - assertTrue(passes("from foo
metadata _score | rename _score as foo").stream().anyMatch(a -> a.name().equals("foo"))); - } - - private List passes(String query) { - LogicalPlan logicalPlan = defaultAnalyzer.analyze(parser.createStatement(query)); - assertTrue(logicalPlan.resolved()); - return logicalPlan.output(); - } - public void testIntervalAsString() { // DateTrunc for (String interval : List.of("1 minu", "1 dy", "1.5 minutes", "0.5 days", "minutes 1", "day 5")) { @@ -1894,38 +1869,35 @@ public class VerifierTests extends ESTestCase { ); } - public void testCategorizeSingleGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - - query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); - query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); + public void testCategorizeOnlyFirstGrouping() { + query("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name)"); + query("FROM test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); + query("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no"); + query("FROM test | STATS COUNT(*) BY a = CATEGORIZE(first_name), b = emp_no"); assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("from test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no") - ); - assertEquals( - "1:39: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", + "1:39: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", error("FROM test | STATS COUNT(*) BY emp_no, CATEGORIZE(first_name)") ); assertEquals( - "1:35: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", - error("FROM test | STATS COUNT(*) BY a = CATEGORIZE(first_name), b = emp_no") - ); - assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings\n" - + "line 1:55: cannot use CATEGORIZE grouping function [CATEGORIZE(last_name)] with multiple groupings", + "1:55: CATEGORIZE grouping function [CATEGORIZE(last_name)] can only be in the first grouping expression", error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(last_name)") ); assertEquals( - "1:31: cannot use CATEGORIZE grouping function [CATEGORIZE(first_name)] with multiple groupings", + "1:55: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), CATEGORIZE(first_name)") ); + assertEquals( + "1:63: CATEGORIZE grouping function [CATEGORIZE(last_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no, CATEGORIZE(last_name)") + ); + assertEquals( + "1:63: CATEGORIZE grouping function [CATEGORIZE(first_name)] can only be in the first grouping expression", + error("FROM test | STATS COUNT(*) BY CATEGORIZE(first_name), emp_no, CATEGORIZE(first_name)") + ); } public void testCategorizeNestedGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1939,8 +1911,6 @@ public class VerifierTests extends ESTestCase { } public void testCategorizeWithinAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = 
CATEGORIZE(first_name)"); query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY cat = CATEGORIZE(first_name)"); query("from test | STATS MV_COUNT(CATEGORIZE(first_name)), COUNT(*) BY CATEGORIZE(first_name)"); @@ -1969,8 +1939,6 @@ public class VerifierTests extends ESTestCase { } public void testCategorizeWithFilteredAggregations() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - query("FROM test | STATS COUNT(*) WHERE first_name == \"John\" BY CATEGORIZE(last_name)"); query("FROM test | STATS COUNT(*) WHERE last_name == \"Doe\" BY CATEGORIZE(last_name)"); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index c2a26845d4e8..87bc11d8388b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -1212,8 +1211,6 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] */ public void testCombineProjectionWithCategorizeGrouping() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - var plan = plan(""" from test | eval k = first_name, k1 = k @@ -3949,8 +3946,6 @@ public class LogicalPlanOptimizerTests extends ESTestCase { * \_EsRelation[test][_meta_field{f}#14, emp_no{f}#8, first_name{f}#9, ge..] 
*/ public void testNestedExpressionsInGroupsWithCategorize() { - assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE_V5.isEnabled()); - var plan = optimizedPlan(""" from test | stats c = count(salary) by CATEGORIZE(CONCAT(first_name, "abc")) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java index 60b632c443f8..1000c05282fd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/EsqlSessionCCSUtilsTests.java @@ -8,10 +8,18 @@ package org.elasticsearch.xpack.esql.session; import org.apache.lucene.index.CorruptIndexException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesFailure; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.indices.IndicesExpressionGrouper; +import org.elasticsearch.license.License; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.license.internal.XPackLicenseStatus; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.NoSeedNodeLeftException; @@ -20,9 +28,11 @@ import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.analysis.TableInfo; import org.elasticsearch.xpack.esql.core.type.EsField; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; +import org.elasticsearch.xpack.esql.plan.TableIdentifier; import org.elasticsearch.xpack.esql.type.EsFieldTests; import java.util.ArrayList; @@ -32,8 +42,12 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.LongSupplier; import java.util.function.Predicate; +import java.util.stream.Collectors; +import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; +import static org.elasticsearch.xpack.esql.session.EsqlSessionCCSUtils.checkForCcsLicense; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -627,4 +641,148 @@ public class EsqlSessionCCSUtilsTests extends ESTestCase { } } + + public void testCheckForCcsLicense() { + final TestIndicesExpressionGrouper indicesGrouper = new TestIndicesExpressionGrouper(); + + // this seems to be used only for tracking usage of features, not for checking if a license is expired + final LongSupplier currTime = () -> System.currentTimeMillis(); + + XPackLicenseState enterpriseLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.ENTERPRISE)); + XPackLicenseState trialLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.TRIAL)); + XPackLicenseState platinumLicenseValid = new XPackLicenseState(currTime, 
activeLicenseStatus(License.OperationMode.PLATINUM)); + XPackLicenseState goldLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.GOLD)); + XPackLicenseState basicLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.BASIC)); + XPackLicenseState standardLicenseValid = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.STANDARD)); + XPackLicenseState missingLicense = new XPackLicenseState(currTime, activeLicenseStatus(License.OperationMode.MISSING)); + XPackLicenseState nullLicense = null; + + final XPackLicenseStatus enterpriseStatus = inactiveLicenseStatus(License.OperationMode.ENTERPRISE); + XPackLicenseState enterpriseLicenseInactive = new XPackLicenseState(currTime, enterpriseStatus); + XPackLicenseState trialLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.TRIAL)); + XPackLicenseState platinumLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.PLATINUM)); + XPackLicenseState goldLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.GOLD)); + XPackLicenseState basicLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.BASIC)); + XPackLicenseState standardLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.STANDARD)); + XPackLicenseState missingLicenseInactive = new XPackLicenseState(currTime, inactiveLicenseStatus(License.OperationMode.MISSING)); + + // local only search does not require an enterprise license + { + List indices = new ArrayList<>(); + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); + + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseValid); + checkForCcsLicense(indices, indicesGrouper, platinumLicenseValid); + checkForCcsLicense(indices, indicesGrouper, goldLicenseValid); + checkForCcsLicense(indices, indicesGrouper, trialLicenseValid); + checkForCcsLicense(indices, indicesGrouper, basicLicenseValid); + checkForCcsLicense(indices, indicesGrouper, standardLicenseValid); + checkForCcsLicense(indices, indicesGrouper, missingLicense); + checkForCcsLicense(indices, indicesGrouper, nullLicense); + + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, platinumLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, goldLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, trialLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, basicLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, standardLicenseInactive); + checkForCcsLicense(indices, indicesGrouper, missingLicenseInactive); + } + + // cross-cluster search requires a valid (active, non-expired) enterprise license OR a valid trial license + { + List indices = new ArrayList<>(); + final String indexExprWithRemotes = randomFrom("remote:idx", "idx1,remote:idx2*,remote:logs,c*:idx4"); + if (randomBoolean()) { + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, indexExprWithRemotes))); + } else { + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, randomFrom("idx", "idx1,idx2*")))); + indices.add(new TableInfo(new TableIdentifier(EMPTY, null, indexExprWithRemotes))); + } + + // licenses that work + checkForCcsLicense(indices, indicesGrouper, enterpriseLicenseValid); + checkForCcsLicense(indices, indicesGrouper, trialLicenseValid); + + // all others 
fail --- + + // active non-expired non-Enterprise non-Trial licenses + assertLicenseCheckFails(indices, indicesGrouper, platinumLicenseValid, "active platinum license"); + assertLicenseCheckFails(indices, indicesGrouper, goldLicenseValid, "active gold license"); + assertLicenseCheckFails(indices, indicesGrouper, basicLicenseValid, "active basic license"); + assertLicenseCheckFails(indices, indicesGrouper, standardLicenseValid, "active standard license"); + assertLicenseCheckFails(indices, indicesGrouper, missingLicense, "active missing license"); + assertLicenseCheckFails(indices, indicesGrouper, nullLicense, "none"); + + // inactive/expired licenses + assertLicenseCheckFails(indices, indicesGrouper, enterpriseLicenseInactive, "expired enterprise license"); + assertLicenseCheckFails(indices, indicesGrouper, trialLicenseInactive, "expired trial license"); + assertLicenseCheckFails(indices, indicesGrouper, platinumLicenseInactive, "expired platinum license"); + assertLicenseCheckFails(indices, indicesGrouper, goldLicenseInactive, "expired gold license"); + assertLicenseCheckFails(indices, indicesGrouper, basicLicenseInactive, "expired basic license"); + assertLicenseCheckFails(indices, indicesGrouper, standardLicenseInactive, "expired standard license"); + assertLicenseCheckFails(indices, indicesGrouper, missingLicenseInactive, "expired missing license"); + } + } + + private XPackLicenseStatus activeLicenseStatus(License.OperationMode operationMode) { + return new XPackLicenseStatus(operationMode, true, null); + } + + private XPackLicenseStatus inactiveLicenseStatus(License.OperationMode operationMode) { + return new XPackLicenseStatus(operationMode, false, "License Expired 123"); + } + + private void assertLicenseCheckFails( + List indices, + TestIndicesExpressionGrouper indicesGrouper, + XPackLicenseState licenseState, + String expectedErrorMessageSuffix + ) { + ElasticsearchStatusException e = expectThrows( + ElasticsearchStatusException.class, + () -> checkForCcsLicense(indices, indicesGrouper, licenseState) + ); + assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); + assertThat( + e.getMessage(), + equalTo( + "A valid Enterprise license is required to run ES|QL cross-cluster searches. 
License found: " + expectedErrorMessageSuffix + ) + ); + } + + static class TestIndicesExpressionGrouper implements IndicesExpressionGrouper { + @Override + public Map groupIndices(IndicesOptions indicesOptions, String[] indexExpressions) { + final Map originalIndicesMap = new HashMap<>(); + final String localKey = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; + + for (String expr : indexExpressions) { + assertFalse(Strings.isNullOrBlank(expr)); + String[] split = expr.split(":", 2); + assertTrue("Bad index expression: " + expr, split.length < 3); + String clusterAlias; + String indexExpr; + if (split.length == 1) { + clusterAlias = localKey; + indexExpr = expr; + } else { + clusterAlias = split[0]; + indexExpr = split[1]; + + } + OriginalIndices currIndices = originalIndicesMap.get(clusterAlias); + if (currIndices == null) { + originalIndicesMap.put(clusterAlias, new OriginalIndices(new String[] { indexExpr }, indicesOptions)); + } else { + List indicesList = Arrays.stream(currIndices.indices()).collect(Collectors.toList()); + indicesList.add(indexExpr); + originalIndicesMap.put(clusterAlias, new OriginalIndices(indicesList.toArray(new String[0]), indicesOptions)); + } + } + return originalIndicesMap; + } + } + } diff --git a/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java index 5168cd11eb17..a5d966873dda 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java +++ b/x-pack/plugin/ilm/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ilm/CCRIndexLifecycleIT.java @@ -38,7 +38,6 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.ilm.ShrinkIndexNameSupplier.SHRUNKEN_INDEX_PREFIX; import static org.hamcrest.Matchers.equalTo; @@ -762,8 +761,8 @@ public class CCRIndexLifecycleIT extends ESCCRRestTestCase { } private void createNewSingletonPolicy(String policyName, String phaseName, LifecycleAction action, TimeValue after) throws IOException { - Phase phase = new Phase(phaseName, after, singletonMap(action.getWriteableName(), action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, after, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java index 60e71b095039..811d07a43667 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/MigrateToDataTiersIT.java @@ -46,7 +46,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static 
org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createPolicy; @@ -101,11 +100,11 @@ public class MigrateToDataTiersIT extends ESRestTestCase { Map warmActions = new HashMap<>(); warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); - warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); + warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, Map.of("data", "warm"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); - coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); + coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, Map.of("data", "cold"))); createPolicy( client(), @@ -114,7 +113,7 @@ public class MigrateToDataTiersIT extends ESRestTestCase { new Phase("warm", TimeValue.ZERO, warmActions), new Phase("cold", TimeValue.timeValueDays(100), coldActions), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) ); createIndexWithSettings( @@ -377,11 +376,11 @@ public class MigrateToDataTiersIT extends ESRestTestCase { Map warmActions = new HashMap<>(); warmActions.put(SetPriorityAction.NAME, new SetPriorityAction(50)); warmActions.put(ForceMergeAction.NAME, new ForceMergeAction(1, null)); - warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, singletonMap("data", "warm"), null, null)); + warmActions.put(AllocateAction.NAME, new AllocateAction(null, null, Map.of("data", "warm"), null, null)); warmActions.put(ShrinkAction.NAME, new ShrinkAction(1, null, false)); Map coldActions = new HashMap<>(); coldActions.put(SetPriorityAction.NAME, new SetPriorityAction(0)); - coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, singletonMap("data", "cold"))); + coldActions.put(AllocateAction.NAME, new AllocateAction(0, null, null, null, Map.of("data", "cold"))); createPolicy( client(), @@ -390,7 +389,7 @@ public class MigrateToDataTiersIT extends ESRestTestCase { new Phase("warm", TimeValue.ZERO, warmActions), new Phase("cold", TimeValue.timeValueDays(100), coldActions), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE)) ); createIndexWithSettings( diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java index 3949139db033..a1c7ebc2d8b2 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/TimeSeriesRestDriver.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.core.ilm.Step; import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.HashMap; import java.util.List; import 
java.util.Locale; @@ -50,7 +49,6 @@ import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomBoolean; @@ -154,8 +152,8 @@ public final class TimeSeriesRestDriver { LifecycleAction action, TimeValue after ) throws IOException { - Phase phase = new Phase(phaseName, after, singletonMap(action.getWriteableName(), action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, after, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); @@ -202,7 +200,7 @@ public final class TimeSeriesRestDriver { new AllocateAction( 1, null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), + Map.of("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), null, null ) @@ -215,7 +213,7 @@ public final class TimeSeriesRestDriver { new AllocateAction( 0, null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), + Map.of("_name", "javaRestTest-0,javaRestTest-1," + "javaRestTest-2," + "javaRestTest-3"), null, null ) @@ -224,7 +222,7 @@ public final class TimeSeriesRestDriver { phases.put("hot", new Phase("hot", hotTime, hotActions)); phases.put("warm", new Phase("warm", TimeValue.ZERO, warmActions)); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); - phases.put("delete", new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policyName, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -300,7 +298,7 @@ public final class TimeSeriesRestDriver { Map responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); Map indexSettings = (Map) responseMap.get(index); if (indexSettings == null) { - return Collections.emptyMap(); + return Map.of(); } return (Map) indexSettings.get("settings"); } diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java index 7f75b010346a..370e00785e84 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/ChangePolicyForIndexIT.java @@ -32,7 +32,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; import static 
org.elasticsearch.xpack.TimeSeriesRestDriver.createNewSingletonPolicy; @@ -67,7 +66,7 @@ public class ChangePolicyForIndexIT extends ESRestTestCase { new Phase( "hot", TimeValue.ZERO, - singletonMap(RolloverAction.NAME, new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)) + Map.of(RolloverAction.NAME, new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)) ) ); phases1.put( @@ -75,7 +74,7 @@ public class ChangePolicyForIndexIT extends ESRestTestCase { new Phase( "warm", TimeValue.ZERO, - singletonMap(AllocateAction.NAME, new AllocateAction(1, null, singletonMap("_name", "foobarbaz"), null, null)) + Map.of(AllocateAction.NAME, new AllocateAction(1, null, Map.of("_name", "foobarbaz"), null, null)) ) ); LifecyclePolicy lifecyclePolicy1 = new LifecyclePolicy("policy_1", phases1); @@ -85,7 +84,7 @@ public class ChangePolicyForIndexIT extends ESRestTestCase { new Phase( "hot", TimeValue.ZERO, - singletonMap(RolloverAction.NAME, new RolloverAction(null, null, null, 1000L, null, null, null, null, null, null)) + Map.of(RolloverAction.NAME, new RolloverAction(null, null, null, 1000L, null, null, null, null, null, null)) ) ); phases2.put( @@ -93,15 +92,9 @@ public class ChangePolicyForIndexIT extends ESRestTestCase { new Phase( "warm", TimeValue.ZERO, - singletonMap( + Map.of( AllocateAction.NAME, - new AllocateAction( - 1, - null, - singletonMap("_name", "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"), - null, - null - ) + new AllocateAction(1, null, Map.of("_name", "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"), null, null) ) ) ); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java index 2b722a6555a0..4c53d711ffde 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java @@ -58,7 +58,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createFullPolicy; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; @@ -219,7 +218,7 @@ public class TimeSeriesLifecycleActionsIT extends ESRestTestCase { Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); String allocateNodeName = "javaRestTest-0,javaRestTest-1,javaRestTest-2,javaRestTest-3"; - AllocateAction allocateAction = new AllocateAction(null, null, singletonMap("_name", allocateNodeName), null, null); + AllocateAction allocateAction = new AllocateAction(null, null, Map.of("_name", allocateNodeName), null, null); String endPhase = randomFrom("warm", "cold"); createNewSingletonPolicy(client(), policy, endPhase, allocateAction); updatePolicy(client(), index, policy); @@ -978,7 +977,7 @@ public class TimeSeriesLifecycleActionsIT extends ESRestTestCase { hotActions.put(SetPriorityAction.NAME, new SetPriorityAction(100)); Map phases = new HashMap<>(); phases.put("hot", new Phase("hot", TimeValue.ZERO, hotActions)); - phases.put("delete", new Phase("delete", TimeValue.ZERO, 
singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -1004,7 +1003,7 @@ public class TimeSeriesLifecycleActionsIT extends ESRestTestCase { phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); phases.put( "delete", - new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, DeleteAction.NO_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.timeValueMillis(10000), Map.of(DeleteAction.NAME, DeleteAction.NO_SNAPSHOT_DELETE)) ); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java index f00b5b566c15..15e59a159333 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/SearchableSnapshotActionIT.java @@ -47,7 +47,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider.INDEX_TOTAL_SHARDS_PER_NODE_SETTING; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createComposableTemplate; @@ -185,7 +184,7 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { Map coldActions = Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo)); Map phases = new HashMap<>(); phases.put("cold", new Phase("cold", TimeValue.ZERO, coldActions)); - phases.put("delete", new Phase("delete", TimeValue.timeValueMillis(10000), singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE))); + phases.put("delete", new Phase("delete", TimeValue.timeValueMillis(10000), Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE))); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, phases); // PUT policy XContentBuilder builder = jsonBuilder(); @@ -455,7 +454,7 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -516,12 +515,12 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -586,7 +585,7 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - 
singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, null @@ -600,12 +599,12 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -664,14 +663,14 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) ); assertBusy(() -> { logger.info("--> waiting for [{}] to be deleted...", partiallyMountedIndexName); @@ -695,7 +694,7 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, null @@ -710,12 +709,12 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null ); @@ -775,10 +774,10 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), null, - new Phase("delete", TimeValue.ZERO, singletonMap(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) + new Phase("delete", TimeValue.ZERO, Map.of(DeleteAction.NAME, WITH_SNAPSHOT_DELETE)) ); assertBusy(() -> { logger.info("--> waiting for [{}] to be deleted...", restoredPartiallyMountedIndexName); @@ -803,12 +802,12 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, 
randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(secondRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(secondRepo, randomBoolean())) ), null ) @@ -934,12 +933,12 @@ public class SearchableSnapshotActionIT extends ESRestTestCase { new Phase( "cold", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean())) ), new Phase( "frozen", TimeValue.ZERO, - singletonMap(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode)) + Map.of(SearchableSnapshotAction.NAME, new SearchableSnapshotAction(snapshotRepo, randomBoolean(), totalShardsPerNode)) ), null ); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java index d2f2dbbd0c9f..2fecf3c617cc 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/ShrinkActionIT.java @@ -39,7 +39,6 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.TimeSeriesRestDriver.createIndexWithSettings; @@ -286,7 +285,7 @@ public class ShrinkActionIT extends ESRestTestCase { TimeValue.ZERO, Map.of(migrateAction.getWriteableName(), migrateAction, shrinkAction.getWriteableName(), shrinkAction) ); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, singletonMap(phase.getName(), phase)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java b/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java index 946050017761..12dede7067b0 100644 --- a/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java +++ b/x-pack/plugin/ilm/qa/with-security/src/javaRestTest/java/org/elasticsearch/xpack/security/PermissionsIT.java @@ -45,7 +45,6 @@ import java.io.InputStream; import java.util.Map; import java.util.concurrent.TimeUnit; -import static java.util.Collections.singletonMap; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -225,8 +224,8 @@ public class PermissionsIT extends ESRestTestCase { } private void createNewSingletonPolicy(RestClient client, String policy, String phaseName, LifecycleAction action) throws IOException { - Phase phase = new Phase(phaseName, TimeValue.ZERO, singletonMap(action.getWriteableName(), 
action)); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, singletonMap(phase.getName(), phase)); + Phase phase = new Phase(phaseName, TimeValue.ZERO, Map.of(action.getWriteableName(), action)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of(phase.getName(), phase)); XContentBuilder builder = jsonBuilder(); lifecyclePolicy.toXContent(builder, null); final StringEntity entity = new StringEntity("{ \"policy\":" + Strings.toString(builder) + "}", ContentType.APPLICATION_JSON); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java index 93534c2bbd79..d3892d6d52f7 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ClusterStateWaitThresholdBreachTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; import org.junit.Before; -import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Locale; @@ -65,7 +64,7 @@ public class ClusterStateWaitThresholdBreachTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, Ccr.class); } @Override diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java index 7a0e00e5c414..6d409bf474cf 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/DataTiersMigrationsTests.java @@ -30,9 +30,8 @@ import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; import org.junit.Before; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -57,7 +56,7 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); } @Override @@ -100,9 +99,9 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { logger.info("starting a cold data node"); internalCluster().startNode(coldNode(Settings.EMPTY)); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.emptyMap()); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase("cold", TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of()); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase("cold", TimeValue.ZERO, Map.of()); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("hot", hotPhase, "warm", warmPhase, "cold", coldPhase)); 
PutLifecycleRequest putLifecycleRequest = new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy); assertAcked(client().execute(ILMActions.PUT, putLifecycleRequest).get()); @@ -161,9 +160,9 @@ public class DataTiersMigrationsTests extends ESIntegTestCase { logger.info("starting a cold data node"); internalCluster().startNode(coldNode(Settings.EMPTY)); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.emptyMap()); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.emptyMap()); - Phase coldPhase = new Phase("cold", TimeValue.ZERO, Collections.emptyMap()); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of()); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of()); + Phase coldPhase = new Phase("cold", TimeValue.ZERO, Map.of()); LifecyclePolicy lifecyclePolicy = new LifecyclePolicy(policy, Map.of("hot", hotPhase, "warm", warmPhase, "cold", coldPhase)); PutLifecycleRequest putLifecycleRequest = new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy); assertAcked(client().execute(ILMActions.PUT, putLifecycleRequest).get()); diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java index b443c769407c..2c4c1c9e20bb 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeIT.java @@ -33,10 +33,9 @@ import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -49,7 +48,7 @@ public class ILMMultiNodeIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); } @Override @@ -69,9 +68,9 @@ public class ILMMultiNodeIT extends ESIntegTestCase { ensureGreen(); RolloverAction rolloverAction = new RolloverAction(null, null, null, 1L, null, null, null, null, null, null); - Phase hotPhase = new Phase("hot", TimeValue.ZERO, Collections.singletonMap(rolloverAction.getWriteableName(), rolloverAction)); + Phase hotPhase = new Phase("hot", TimeValue.ZERO, Map.of(rolloverAction.getWriteableName(), rolloverAction)); ShrinkAction shrinkAction = new ShrinkAction(1, null, false); - Phase warmPhase = new Phase("warm", TimeValue.ZERO, Collections.singletonMap(shrinkAction.getWriteableName(), shrinkAction)); + Phase warmPhase = new Phase("warm", TimeValue.ZERO, Map.of(shrinkAction.getWriteableName(), shrinkAction)); Map<String, Phase> phases = new HashMap<>(); phases.put(hotPhase.getName(), hotPhase); phases.put(warmPhase.getName(), warmPhase); @@ -89,7 +88,7 @@ public class ILMMultiNodeIT extends ESIntegTestCase { ); ComposableIndexTemplate template = ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList(index)) + .indexPatterns(List.of(index)) .template(t) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); @@ -121,12
+120,12 @@ public class ILMMultiNodeIT extends ESIntegTestCase { } public void startHotOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_hot", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_hot", "ingest")).build(); internalCluster().startNode(nodeSettings); } public void startWarmOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_warm", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_warm", "ingest")).build(); internalCluster().startNode(nodeSettings); } } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java index e02dd5fe4567..b91a309a23ae 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/ILMMultiNodeWithCCRDisabledIT.java @@ -34,10 +34,9 @@ import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.elasticsearch.xpack.core.ilm.action.PutLifecycleRequest; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -50,7 +49,7 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); + return List.of(LocalStateCompositeXPackPlugin.class, DataStreamsPlugin.class, IndexLifecycle.class, Ccr.class); } @Override @@ -75,7 +74,7 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { actions.put(shrinkAction.getWriteableName(), shrinkAction); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-policy", Collections.singletonMap(hotPhase.getName(), hotPhase)); + LifecyclePolicy lifecyclePolicy = new LifecyclePolicy("shrink-policy", Map.of(hotPhase.getName(), hotPhase)); client().execute(ILMActions.PUT, new PutLifecycleRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, lifecyclePolicy)).get(); Template t = new Template( @@ -89,7 +88,7 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { ); ComposableIndexTemplate template = ComposableIndexTemplate.builder() - .indexPatterns(Collections.singletonList(index)) + .indexPatterns(List.of(index)) .template(t) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate()) .build(); @@ -121,12 +120,12 @@ public class ILMMultiNodeWithCCRDisabledIT extends ESIntegTestCase { } public void startHotOnlyNode() { - Settings nodeSettings = Settings.builder().putList("node.roles", Arrays.asList("master", "data_hot", "ingest")).build(); + Settings nodeSettings = Settings.builder().putList("node.roles", List.of("master", "data_hot", "ingest")).build(); internalCluster().startNode(nodeSettings); } public void startWarmOnlyNode() { - Settings nodeSettings =
Settings.builder().putList("node.roles", List.of("master", "data_warm", "ingest")).build(); internalCluster().startNode(nodeSettings); } } diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java index 0200388903e5..c6536c6501a5 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/IndexLifecycleInitialisationTests.java @@ -56,9 +56,7 @@ import org.junit.Before; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -112,7 +110,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, TestILMPlugin.class); + return List.of(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class, TestILMPlugin.class); } @Before @@ -128,9 +126,9 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { Step.StepKey compKey = new Step.StepKey("mock", "complete", "complete"); steps.add(new ObservableClusterStateWaitStep(key, compKey)); steps.add(new PhaseCompleteStep(compKey, null)); - Map<String, LifecycleAction> actions = Collections.singletonMap(ObservableAction.NAME, OBSERVABLE_ACTION); + Map<String, LifecycleAction> actions = Map.of(ObservableAction.NAME, OBSERVABLE_ACTION); mockPhase = new Phase("mock", TimeValue.timeValueSeconds(0), actions); - Map<String, Phase> phases = Collections.singletonMap("mock", mockPhase); + Map<String, Phase> phases = Map.of("mock", mockPhase); lifecyclePolicy = newLockableLifecyclePolicy("test", phases); } @@ -312,7 +310,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { updateIndexSettings(Settings.builder().put("index.lifecycle.test.complete", true), "test"); { - Phase phase = new Phase("mock", TimeValue.ZERO, Collections.singletonMap("TEST_ACTION", OBSERVABLE_ACTION)); + Phase phase = new Phase("mock", TimeValue.ZERO, Map.of("TEST_ACTION", OBSERVABLE_ACTION)); PhaseExecutionInfo expectedExecutionInfo = new PhaseExecutionInfo(lifecyclePolicy.getName(), phase, 1L, actualModifiedDate); assertBusy(() -> { IndexLifecycleExplainResponse indexResponse = executeExplainRequestAndGetTestIndexResponse("test"); @@ -528,12 +526,12 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { Setting.Property.Dynamic, Setting.Property.IndexScope ); - return Collections.singletonList(COMPLETE_SETTING); + return List.of(COMPLETE_SETTING); } @Override public List<NamedXContentRegistry.Entry> getNamedXContent() { - return Arrays.asList(new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ObservableAction.NAME), (p) -> { + return List.of(new NamedXContentRegistry.Entry(LifecycleAction.class, new ParseField(ObservableAction.NAME), (p) -> { MockAction.parse(p); return OBSERVABLE_ACTION; })); @@ -541,7 +539,7 @@ public class IndexLifecycleInitialisationTests extends ESIntegTestCase { @Override public List<NamedWriteableRegistry.Entry> getNamedWriteables() { - return Arrays.asList( + return List.of( new NamedWriteableRegistry.Entry(LifecycleType.class, LockableLifecycleType.TYPE, (in) -> LockableLifecycleType.INSTANCE), new NamedWriteableRegistry.Entry(LifecycleAction.class,
ObservableAction.NAME, ObservableAction::readObservableAction), new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java index a4e2fdd6d5d8..94bb82fadd7c 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/cluster/metadata/MetadataMigrateToDataTiersRoutingService.java @@ -267,7 +267,7 @@ public final class MetadataMigrateToDataTiersRoutingService { ) { IndexLifecycleMetadata currentLifecycleMetadata = projectMetadata.custom(IndexLifecycleMetadata.TYPE); if (currentLifecycleMetadata == null) { - return Collections.emptyList(); + return List.of(); } List migratedPolicies = new ArrayList<>(); @@ -882,7 +882,6 @@ public final class MetadataMigrateToDataTiersRoutingService { this.migratedPolicies = Collections.unmodifiableList(migratedPolicies); this.migratedTemplates = migratedTemplates; } - } /** diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java index ba4835cf72d7..21c9999a7bd8 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorService.java @@ -41,7 +41,6 @@ import org.elasticsearch.xpack.core.ilm.WaitForNoFollowersStep; import org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -219,8 +218,8 @@ public class IlmHealthIndicatorService implements HealthIndicatorService { GREEN, "No Index Lifecycle Management policies configured", createDetails(verbose, ilmMetadata, currentMode), - Collections.emptyList(), - Collections.emptyList() + List.of(), + List.of() ); } else if (currentMode != OperationMode.RUNNING) { return createIndicator( @@ -238,8 +237,8 @@ public class IlmHealthIndicatorService implements HealthIndicatorService { GREEN, "Index Lifecycle Management is running", createDetails(verbose, ilmMetadata, currentMode), - Collections.emptyList(), - Collections.emptyList() + List.of(), + List.of() ); } else { return createIndicator( diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java index b7a408bcb568..f2a407bfd0ff 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleService.java @@ -57,7 +57,6 @@ import org.elasticsearch.xpack.ilm.history.ILMHistoryStore; import java.io.Closeable; import java.time.Clock; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -527,7 +526,7 @@ public class IndexLifecycleService SingleNodeShutdownMetadata.Type.REPLACE ); if (shutdownNodes.isEmpty()) { - return Collections.emptySet(); + return Set.of(); } // Returning a set of strings will cause weird behavior with multiple projects diff --git 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java index 80a30692c323..d59c57ff3352 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportGetLifecycleAction.java @@ -33,7 +33,6 @@ import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction.Response; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -77,7 +76,7 @@ public class TransportGetLifecycleAction extends TransportMasterNodeAction policyMap = new HashMap<>(); policyMap.put( mixedPolicyName, - new LifecyclePolicyMetadata(mixedPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(mixedPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyMap.put( allClusterPolicyName, - new LifecyclePolicyMetadata(allClusterPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(allClusterPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyMap.put( invalidPolicyName, - new LifecyclePolicyMetadata(invalidPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + new LifecyclePolicyMetadata(invalidPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); policyStepsRegistry = new PolicyStepsRegistry(NamedXContentRegistry.EMPTY, client, null); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java index 9e2a67caac25..7a37aaba96c1 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IlmHealthIndicatorServiceTests.java @@ -36,7 +36,6 @@ import org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -177,7 +176,7 @@ public class IlmHealthIndicatorServiceTests extends ESTestCase { YELLOW, "Index Lifecycle Management is not running", new SimpleHealthIndicatorDetails(Map.of("ilm_status", status, "policies", 1, "stagnating_indices", 0)), - Collections.singletonList( + List.of( new HealthIndicatorImpact( NAME, IlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, @@ -251,7 +250,7 @@ public class IlmHealthIndicatorServiceTests extends ESTestCase { YELLOW, "Index Lifecycle Management is not running", HealthIndicatorDetails.EMPTY, - Collections.singletonList( + List.of( new HealthIndicatorImpact( NAME, IlmHealthIndicatorService.AUTOMATION_DISABLED_IMPACT_ID, diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java index d81faf6a398d..4e8d7440eb77 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java +++ 
b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleInfoTransportActionTests.java @@ -33,7 +33,6 @@ import org.junit.Before; import org.mockito.Mockito; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -75,18 +74,18 @@ public class IndexLifecycleInfoTransportActionTests extends ESTestCase { indexPolicies.put("index_3", policy1Name); indexPolicies.put("index_4", policy1Name); indexPolicies.put("index_5", policy3Name); - LifecyclePolicy policy1 = new LifecyclePolicy(policy1Name, Collections.emptyMap()); + LifecyclePolicy policy1 = new LifecyclePolicy(policy1Name, Map.of()); policies.add(policy1); - PolicyStats policy1Stats = new PolicyStats(Collections.emptyMap(), 4); + PolicyStats policy1Stats = new PolicyStats(Map.of(), 4); Map phases1 = new HashMap<>(); LifecyclePolicy policy2 = new LifecyclePolicy(policy2Name, phases1); policies.add(policy2); - PolicyStats policy2Stats = new PolicyStats(Collections.emptyMap(), 0); + PolicyStats policy2Stats = new PolicyStats(Map.of(), 0); - LifecyclePolicy policy3 = new LifecyclePolicy(policy3Name, Collections.emptyMap()); + LifecyclePolicy policy3 = new LifecyclePolicy(policy3Name, Map.of()); policies.add(policy3); - PolicyStats policy3Stats = new PolicyStats(Collections.emptyMap(), 1); + PolicyStats policy3Stats = new PolicyStats(Map.of(), 1); ClusterState clusterState = buildClusterState(policies, indexPolicies); Mockito.when(clusterService.state()).thenReturn(clusterState); @@ -110,7 +109,7 @@ public class IndexLifecycleInfoTransportActionTests extends ESTestCase { private ClusterState buildClusterState(List lifecyclePolicies, Map indexPolicies) { Map lifecyclePolicyMetadatasMap = lifecyclePolicies.stream() - .map(p -> new LifecyclePolicyMetadata(p, Collections.emptyMap(), 1, 0L)) + .map(p -> new LifecyclePolicyMetadata(p, Map.of(), 1, 0L)) .collect(Collectors.toMap(LifecyclePolicyMetadata::getName, Function.identity())); IndexLifecycleMetadata indexLifecycleMetadata = new IndexLifecycleMetadata(lifecyclePolicyMetadatasMap, OperationMode.RUNNING); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java index 0356333c471f..67d471d59229 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleMetadataTests.java @@ -43,8 +43,6 @@ import org.elasticsearch.xpack.core.ilm.UnfollowAction; import org.elasticsearch.xpack.core.ilm.WaitForSnapshotAction; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.SortedMap; @@ -62,10 +60,7 @@ public class IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial Map policies = Maps.newMapWithExpectedSize(numPolicies); for (int i = 0; i < numPolicies; i++) { LifecyclePolicy policy = randomTimeseriesLifecyclePolicy(randomAlphaOfLength(4) + i); - policies.put( - policy.getName(), - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policies.put(policy.getName(), new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); } return new IndexLifecycleMetadata(policies, randomFrom(OperationMode.values())); } @@ -83,7 +78,7 @@ public class 
IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { return new NamedWriteableRegistry( - Arrays.asList( + List.of( new NamedWriteableRegistry.Entry( LifecycleType.class, TimeseriesLifecycleType.TYPE, @@ -110,7 +105,7 @@ public class IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial protected NamedXContentRegistry xContentRegistry() { List entries = new ArrayList<>(ClusterModule.getNamedXWriteables()); entries.addAll( - Arrays.asList( + List.of( new NamedXContentRegistry.Entry( LifecycleType.class, new ParseField(TimeseriesLifecycleType.TYPE), @@ -154,7 +149,7 @@ public class IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial policyName, new LifecyclePolicyMetadata( randomTimeseriesLifecyclePolicy(policyName), - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ) @@ -191,9 +186,9 @@ public class IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial Map phases = Maps.newMapWithExpectedSize(numberPhases); for (int j = 0; j < numberPhases; j++) { TimeValue after = randomTimeValue(0, 1_000_000_000, TimeUnit.SECONDS, TimeUnit.MINUTES, TimeUnit.HOURS, TimeUnit.DAYS); - Map actions = Collections.emptyMap(); + Map actions = Map.of(); if (randomBoolean()) { - actions = Collections.singletonMap(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE); + actions = Map.of(DeleteAction.NAME, DeleteAction.WITH_SNAPSHOT_DELETE); } String phaseName = randomAlphaOfLength(10); phases.put(phaseName, new Phase(phaseName, after, actions)); @@ -203,7 +198,7 @@ public class IndexLifecycleMetadataTests extends ChunkedToXContentDiffableSerial policyName, new LifecyclePolicyMetadata( newTestLifecyclePolicy(policyName, phases), - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ) diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java index 2aa90ee07430..f3a9d3155282 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunnerTests.java @@ -73,8 +73,6 @@ import org.mockito.Mockito; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -89,7 +87,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; -import static java.util.stream.Collectors.toList; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING; @@ -248,7 +245,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { List waitForRolloverStepList = action.toSteps(client, phaseName, null) .stream() .filter(s -> s.getKey().name().equals(WaitForRolloverReadyStep.NAME)) - .collect(toList()); + .toList(); assertThat(waitForRolloverStepList.size(), is(1)); Step waitForRolloverStep = waitForRolloverStepList.get(0); StepKey stepKey = waitForRolloverStep.getKey(); @@ -288,7 +285,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); 
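The Arrays.asList and collect(toList()) replacements in the surrounding hunks are close to, but not exactly, drop-in substitutions; a small sketch of the semantic gaps, again using only JDK types (class name illustrative, not from the patch):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;

public class ListFactoryMigrationSketch {
    public static void main(String[] args) {
        // Arrays.asList: a fixed-size view over an array; set() writes through, add() fails.
        List<String> legacy = Arrays.asList("master", "data_hot", "ingest");
        legacy.set(0, "data_content");            // allowed
        // legacy.add("transform");               // UnsupportedOperationException

        // List.of: fully immutable and null-hostile.
        List<String> modern = List.of("master", "data_hot", "ingest");
        // modern.set(0, "data_content");         // UnsupportedOperationException
        // List.of("master", null);               // NullPointerException

        // Stream.toList() also yields an unmodifiable list, whereas
        // Collectors.toList() makes no such guarantee (in practice an ArrayList).
        List<Integer> fromStream = Stream.of(1, 2, 3).toList();
        System.out.println(legacy + " " + modern + " " + fromStream);
    }
}

The test code above never mutates or null-populates these lists, which is what makes the substitution safe there.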
ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -317,7 +314,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { StepKey nextStepKey = new StepKey("phase", "action", "next_cluster_state_action_step"); MockClusterStateActionStep step = new MockClusterStateActionStep(stepKey, nextStepKey); MockClusterStateActionStep nextStep = new MockClusterStateActionStep(nextStepKey, null); - MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, Arrays.asList(step, nextStep)); + MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, List.of(step, nextStep)); stepRegistry.setResolver((i, k) -> { if (stepKey.equals(k)) { return step; @@ -340,7 +337,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -428,7 +425,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -478,7 +475,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -505,7 +502,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { StepKey nextStepKey = new StepKey("phase", "action", "async_action_step"); MockClusterStateActionStep step = new MockClusterStateActionStep(stepKey, 
nextStepKey); MockAsyncActionStep nextStep = new MockAsyncActionStep(nextStepKey, null); - MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, Arrays.asList(step, nextStep)); + MockPolicyStepsRegistry stepRegistry = createMultiStepPolicyStepRegistry(policyName, List.of(step, nextStep)); stepRegistry.setResolver((i, k) -> { if (stepKey.equals(k)) { return step; @@ -528,7 +525,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -605,7 +602,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { .build(); ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); DiscoveryNode node = clusterService.localNode(); - IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + IndexLifecycleMetadata ilm = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); ClusterState state = ClusterState.builder(new ClusterName("cluster")) .metadata(Metadata.builder().put(indexMetadata, true).putCustom(IndexLifecycleMetadata.TYPE, ilm)) .nodes(DiscoveryNodes.builder().add(node).masterNodeId(node.getId()).localNodeId(node.getId())) @@ -787,7 +784,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { Client client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicyWithAllPhases(policyName); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong()); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong()); String phaseName = randomFrom(policy.getPhases().keySet()); Phase phase = policy.getPhases().get(phaseName); PhaseExecutionInfo pei = new PhaseExecutionInfo(policy.getName(), phase, 1, randomNonNegativeLong()); @@ -826,7 +823,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { StepKey stepKey = new StepKey("phase", MockAction.NAME, MockAction.NAME); MockAsyncActionStep step = new MockAsyncActionStep(stepKey, null); SortedMap lifecyclePolicyMap = new TreeMap<>( - Collections.singletonMap( + Map.of( policyName, new LifecyclePolicyMetadata( createPolicy(policyName, null, step.getKey()), @@ -836,9 +833,9 @@ public class IndexLifecycleRunnerTests extends ESTestCase { ) ) ); - Map firstStepMap = Collections.singletonMap(policyName, step); - Map policySteps = Collections.singletonMap(step.getKey(), step); - Map> stepMap = Collections.singletonMap(policyName, policySteps); + Map firstStepMap = Map.of(policyName, step); + Map policySteps = Map.of(step.getKey(), step); + Map> stepMap = Map.of(policyName, policySteps); PolicyStepsRegistry policyStepsRegistry = new PolicyStepsRegistry( lifecyclePolicyMap, firstStepMap, @@ -899,7 +896,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { assert unsafeStep == null || 
safeStep.phase().equals(unsafeStep.phase()) == false : "safe and unsafe actions must be in different phases"; Map actions = new HashMap<>(); - List steps = Collections.singletonList(new MockStep(safeStep, null)); + List steps = List.of(new MockStep(safeStep, null)); MockAction safeAction = new MockAction(steps, true); actions.put(safeAction.getWriteableName(), safeAction); Phase phase = new Phase(safeStep.phase(), TimeValue.timeValueMillis(0), actions); @@ -908,7 +905,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { if (unsafeStep != null) { assert MockAction.NAME.equals(unsafeStep.action()) : "The unsafe action needs to be MockAction.NAME"; Map actions = new HashMap<>(); - List steps = Collections.singletonList(new MockStep(unsafeStep, null)); + List steps = List.of(new MockStep(unsafeStep, null)); MockAction unsafeAction = new MockAction(steps, false); actions.put(unsafeAction.getWriteableName(), unsafeAction); Phase phase = new Phase(unsafeStep.phase(), TimeValue.timeValueMillis(0), actions); @@ -1235,7 +1232,7 @@ public class IndexLifecycleRunnerTests extends ESTestCase { } public static MockPolicyStepsRegistry createOneStepPolicyStepRegistry(String policyName, Step step) { - return createMultiStepPolicyStepRegistry(policyName, Collections.singletonList(step)); + return createMultiStepPolicyStepRegistry(policyName, List.of(step)); } public static MockPolicyStepsRegistry createMultiStepPolicyStepRegistry(String policyName, List steps) { diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java index eceb81542377..b77e643bc285 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleServiceTests.java @@ -58,9 +58,9 @@ import org.mockito.Mockito; import java.time.Clock; import java.time.Instant; import java.time.ZoneId; -import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.UUID; @@ -114,7 +114,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { }).when(executorService).execute(any()); Settings settings = Settings.builder().put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s").build(); when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(settings, Collections.singleton(LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING)) + new ClusterSettings(settings, Set.of(LifecycleSettings.LIFECYCLE_POLL_INTERVAL_SETTING)) ); when(clusterService.lifecycleState()).thenReturn(State.STARTED); @@ -154,14 +154,11 @@ public class IndexLifecycleServiceTests extends ESTestCase { randomStepKey(), randomStepKey() ); - MockAction mockAction = new MockAction(Collections.singletonList(mockStep)); - Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); - LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase)); + MockAction mockAction = new MockAction(List.of(mockStep)); + Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction)); + LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase)); SortedMap policyMap = new TreeMap<>(); - policyMap.put( - policyName, - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 
randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); IndexMetadata indexMetadata = IndexMetadata.builder(index.getName()) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policyName)) @@ -191,14 +188,11 @@ public class IndexLifecycleServiceTests extends ESTestCase { mockShrinkStep, randomStepKey() ); - MockAction mockAction = new MockAction(Collections.singletonList(mockStep)); - Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); - LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase)); + MockAction mockAction = new MockAction(List.of(mockStep)); + Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction)); + LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase)); SortedMap policyMap = new TreeMap<>(); - policyMap.put( - policyName, - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(); lifecycleState.setPhase(mockShrinkStep.phase()); @@ -250,14 +244,11 @@ public class IndexLifecycleServiceTests extends ESTestCase { mockShrinkStep, randomStepKey() ); - MockAction mockAction = new MockAction(Collections.singletonList(mockStep)); - Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); - LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase)); + MockAction mockAction = new MockAction(List.of(mockStep)); + Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction)); + LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase)); SortedMap policyMap = new TreeMap<>(); - policyMap.put( - policyName, - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(); lifecycleState.setPhase(mockShrinkStep.phase()); @@ -301,14 +292,11 @@ public class IndexLifecycleServiceTests extends ESTestCase { currentStepKey, randomStepKey() ); - MockAction mockAction = new MockAction(Collections.singletonList(mockStep)); - Phase phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); - LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Collections.singletonMap(phase.getName(), phase)); + MockAction mockAction = new MockAction(List.of(mockStep)); + Phase phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction)); + LifecyclePolicy policy = newTestLifecyclePolicy(policyName, Map.of(phase.getName(), phase)); SortedMap policyMap = new TreeMap<>(); - policyMap.put( - policyName, - new 
LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMap.put(policyName, new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); Index index = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(); lifecycleState.setPhase(currentStepKey.phase()); @@ -370,9 +358,9 @@ public class IndexLifecycleServiceTests extends ESTestCase { } else { i1mockStep = new IndexLifecycleRunnerTests.MockClusterStateActionStep(i1currentStepKey, randomStepKey()); } - MockAction i1mockAction = new MockAction(Collections.singletonList(i1mockStep)); - Phase i1phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", i1mockAction)); - LifecyclePolicy i1policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i1phase.getName(), i1phase)); + MockAction i1mockAction = new MockAction(List.of(i1mockStep)); + Phase i1phase = new Phase("phase", TimeValue.ZERO, Map.of("action", i1mockAction)); + LifecyclePolicy i1policy = newTestLifecyclePolicy(policy1, Map.of(i1phase.getName(), i1phase)); Index index1 = new Index(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); LifecycleExecutionState.Builder i1lifecycleState = LifecycleExecutionState.builder(); i1lifecycleState.setPhase(i1currentStepKey.phase()); @@ -387,9 +375,9 @@ public class IndexLifecycleServiceTests extends ESTestCase { } else { i2mockStep = new IndexLifecycleRunnerTests.MockClusterStateActionStep(i2currentStepKey, randomStepKey()); } - MockAction mockAction = new MockAction(Collections.singletonList(i2mockStep)); - Phase i2phase = new Phase("phase", TimeValue.ZERO, Collections.singletonMap("action", mockAction)); - LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Collections.singletonMap(i2phase.getName(), i1phase)); + MockAction mockAction = new MockAction(List.of(i2mockStep)); + Phase i2phase = new Phase("phase", TimeValue.ZERO, Map.of("action", mockAction)); + LifecyclePolicy i2policy = newTestLifecyclePolicy(policy1, Map.of(i2phase.getName(), i1phase)); Index index2 = new Index( randomValueOtherThan(index1.getName(), () -> randomAlphaOfLengthBetween(1, 20)), randomAlphaOfLengthBetween(1, 20) @@ -422,14 +410,8 @@ public class IndexLifecycleServiceTests extends ESTestCase { } SortedMap policyMap = new TreeMap<>(); - policyMap.put( - policy1, - new LifecyclePolicyMetadata(i1policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); - policyMap.put( - policy2, - new LifecyclePolicyMetadata(i2policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMap.put(policy1, new LifecyclePolicyMetadata(i1policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); + policyMap.put(policy2, new LifecyclePolicyMetadata(i2policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); IndexMetadata i1indexMetadata = IndexMetadata.builder(index1.getName()) .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, policy1)) @@ -533,14 +515,8 @@ public class IndexLifecycleServiceTests extends ESTestCase { SingleNodeShutdownMetadata.Type.REPLACE )) { ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build(); - assertThat( - IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), - equalTo(Collections.emptySet()) - ); - assertThat( - 
IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), - equalTo(Collections.emptySet()) - ); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of())); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of())); IndexMetadata nonDangerousIndex = IndexMetadata.builder("no_danger") .settings(settings(IndexVersion.current()).put(LifecycleSettings.LIFECYCLE_NAME, "mypolicy")) @@ -583,7 +559,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { Map indices = Map.of("no_danger", nonDangerousIndex, "danger", dangerousIndex); Metadata metadata = Metadata.builder() - .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING)) + .putCustom(IndexLifecycleMetadata.TYPE, new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING)) .indices(indices) .persistentSettings(settings(IndexVersion.current()).build()) .build(); @@ -612,14 +588,8 @@ public class IndexLifecycleServiceTests extends ESTestCase { .build(); // No danger yet, because no node is shutting down - assertThat( - IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), - equalTo(Collections.emptySet()) - ); - assertThat( - IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), - equalTo(Collections.emptySet()) - ); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of())); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of())); state = ClusterState.builder(state) .metadata( @@ -627,7 +597,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( - Collections.singletonMap( + Map.of( "shutdown_node", SingleNodeShutdownMetadata.builder() .setNodeId("shutdown_node") @@ -642,15 +612,12 @@ public class IndexLifecycleServiceTests extends ESTestCase { ) .build(); - assertThat( - IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), - equalTo(Collections.emptySet()) - ); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "regular_node"), equalTo(Set.of())); // No danger, because this is a "RESTART" type shutdown assertThat( "restart type shutdowns are not considered dangerous", IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), - equalTo(Collections.emptySet()) + equalTo(Set.of()) ); final String targetNodeName = type == SingleNodeShutdownMetadata.Type.REPLACE ? 
randomAlphaOfLengthBetween(10, 20) : null; @@ -661,7 +628,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { .putCustom( NodesShutdownMetadata.TYPE, new NodesShutdownMetadata( - Collections.singletonMap( + Map.of( "shutdown_node", SingleNodeShutdownMetadata.builder() .setNodeId("shutdown_node") @@ -679,10 +646,7 @@ public class IndexLifecycleServiceTests extends ESTestCase { .build(); // The dangerous index should be calculated as being in danger now - assertThat( - IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), - equalTo(Collections.singleton("danger")) - ); + assertThat(IndexLifecycleService.indicesOnShuttingDownNodesInDangerousStep(state, "shutdown_node"), equalTo(Set.of("danger"))); } } } diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java index 7d96c2ad5738..26cc9735b0ee 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransitionTests.java @@ -48,7 +48,6 @@ import org.elasticsearch.xpack.core.ilm.WaitForRolloverReadyStep; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -80,8 +79,8 @@ public class IndexLifecycleTransitionTests extends ESTestCase { .stream() .findFirst() .orElseThrow(() -> new AssertionError("expected next phase to be present")); - List policyMetadatas = Collections.singletonList( - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + List policyMetadatas = List.of( + new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step"); Step.StepKey nextStep = new Step.StepKey(nextPhase.getName(), "next_action", "next_step"); @@ -128,8 +127,8 @@ public class IndexLifecycleTransitionTests extends ESTestCase { p -> p.getPhases().isEmpty(), () -> LifecyclePolicyTests.randomTestLifecyclePolicy("policy") ); - List policyMetadatas = Collections.singletonList( - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + List policyMetadatas = List.of( + new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step"); Step.StepKey nextStep = new Step.StepKey("current_phase", "next_action", "next_step"); @@ -179,8 +178,8 @@ public class IndexLifecycleTransitionTests extends ESTestCase { p -> p.getPhases().isEmpty(), () -> LifecyclePolicyTests.randomTestLifecyclePolicy("policy") ); - List policyMetadatas = Collections.singletonList( - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + List policyMetadatas = List.of( + new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); Step.StepKey currentStep = new Step.StepKey("current_phase", "current_action", "current_step"); Step.StepKey nextStep = new Step.StepKey("current_phase", "current_action", "next_step"); @@ -236,8 +235,8 @@ public class IndexLifecycleTransitionTests extends ESTestCase { .stream() .findFirst() 
.orElseThrow(() -> new AssertionError("expected next phase to be present")); - List policyMetadatas = Collections.singletonList( - new LifecyclePolicyMetadata(policy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) + List policyMetadatas = List.of( + new LifecyclePolicyMetadata(policy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong()) ); Step.StepKey currentStepKey = new Step.StepKey("current_phase", "current_action", "current_step"); Step.StepKey nextStepKey = new Step.StepKey(nextPhase.getName(), "next_action", "next_step"); @@ -279,7 +278,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStepKey.action()); lifecycleState.setStep(currentStepKey.name()); - ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of()); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, @@ -303,7 +302,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStepKey.action()); lifecycleState.setStep(currentStepKey.name()); - ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of()); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, @@ -325,7 +324,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setPhase(currentStep.phase()); lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); - ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), List.of()); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); ClusterState newClusterState = IndexLifecycleTransition.moveClusterStateToErrorStep( @@ -359,7 +358,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setPhase(currentStep.phase()); lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); - ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, Settings.builder(), lifecycleState.build(), List.of()); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); ClusterState newClusterState = IndexLifecycleTransition.addStepInfoToClusterState(index, clusterState, stepInfo); assertClusterStateStepInfo(clusterState, index, currentStep, newClusterState, stepInfo); @@ -378,9 +377,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); List policyMetadatas = new ArrayList<>(); - policyMetadatas.add( - new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMetadatas.add(new 
LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); Index[] indices = new Index[] { index }; @@ -399,7 +396,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { indexName, indexSettingsBuilder, LifecycleExecutionState.builder().build(), - Collections.emptyList() + List.of() ); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); Index[] indices = new Index[] { index }; @@ -414,7 +411,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { public void testRemovePolicyForIndexIndexDoesntExist() { String indexName = randomAlphaOfLength(10); String oldPolicyName = "old_policy"; - LifecyclePolicy oldPolicy = newTestLifecyclePolicy(oldPolicyName, Collections.emptyMap()); + LifecyclePolicy oldPolicy = newTestLifecyclePolicy(oldPolicyName, Map.of()); Step.StepKey currentStep = AbstractStepTestCase.randomStepKey(); Settings.Builder indexSettingsBuilder = Settings.builder().put(LifecycleSettings.LIFECYCLE_NAME, oldPolicyName); LifecycleExecutionState.Builder lifecycleState = LifecycleExecutionState.builder(); @@ -422,9 +419,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); List policyMetadatas = new ArrayList<>(); - policyMetadatas.add( - new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas); Index index = new Index("doesnt_exist", "im_not_here"); Index[] indices = new Index[] { index }; @@ -448,9 +443,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); List policyMetadatas = new ArrayList<>(); - policyMetadatas.add( - new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); Index[] indices = new Index[] { index }; @@ -475,9 +468,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(currentStep.action()); lifecycleState.setStep(currentStep.name()); List policyMetadatas = new ArrayList<>(); - policyMetadatas.add( - new LifecyclePolicyMetadata(oldPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMetadatas.add(new LifecyclePolicyMetadata(oldPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), policyMetadatas); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); Index[] indices = new Index[] { index }; @@ -756,7 +747,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { 
LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -771,12 +762,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setStep(errorStepKey.name()); lifecycleState.setStepTime(now); lifecycleState.setFailedStep(failedStepKey.name()); - ClusterState clusterState = buildClusterState( - indexName, - indexSettingsBuilder, - lifecycleState.build(), - Collections.singletonList(policyMetadata) - ); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata)); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); ClusterState nextClusterState = IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep( clusterState, @@ -802,7 +788,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -817,12 +803,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setStep(errorStepKey.name()); lifecycleState.setStepTime(now); lifecycleState.setFailedStep(failedStepKey.name()); - ClusterState clusterState = buildClusterState( - indexName, - indexSettingsBuilder, - lifecycleState.build(), - Collections.singletonList(policyMetadata) - ); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata)); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false) @@ -840,7 +821,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { existingIndexName, Settings.builder(), LifecycleExecutionState.builder().build(), - Collections.emptyList() + List.of() ); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, @@ -863,7 +844,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setAction(errorStepKey.action()); lifecycleState.setStep(errorStepKey.name()); lifecycleState.setFailedStep(failedStepKey.name()); - ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of()); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false) @@ -883,7 +864,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setPhase(failedStepKey.phase()); lifecycleState.setAction(failedStepKey.action()); lifecycleState.setStep(failedStepKey.name()); - ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), Collections.emptyList()); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of()); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, () -> 
IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep(clusterState, indexName, () -> now, policyRegistry, false) @@ -906,7 +887,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { LifecyclePolicy policy = createPolicy(policyName, failedStepKey, null); LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata( policy, - Collections.emptyMap(), + Map.of(), randomNonNegativeLong(), randomNonNegativeLong() ); @@ -923,12 +904,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { lifecycleState.setFailedStep(failedStepKey.name()); String initialStepInfo = randomAlphaOfLengthBetween(10, 50); lifecycleState.setStepInfo(initialStepInfo); - ClusterState clusterState = buildClusterState( - indexName, - indexSettingsBuilder, - lifecycleState.build(), - Collections.singletonList(policyMetadata) - ); + ClusterState clusterState = buildClusterState(indexName, indexSettingsBuilder, lifecycleState.build(), List.of(policyMetadata)); Index index = clusterState.metadata().getProject().index(indexName).getIndex(); ClusterState nextClusterState = IndexLifecycleTransition.moveClusterStateToPreviouslyFailedStep( clusterState, @@ -976,13 +952,11 @@ public class IndexLifecycleTransitionTests extends ESTestCase { Map actions = new HashMap<>(); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy currentPolicy = new LifecyclePolicy("my-policy", phases); List policyMetadatas = new ArrayList<>(); - policyMetadatas.add( - new LifecyclePolicyMetadata(currentPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ); + policyMetadatas.add(new LifecyclePolicyMetadata(currentPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())); Step.StepKey errorStepKey = new Step.StepKey("hot", RolloverAction.NAME, ErrorStep.NAME); PolicyStepsRegistry stepsRegistry = createOneStepPolicyStepRegistry("my-policy", new ErrorStep(errorStepKey)); @@ -1043,9 +1017,9 @@ public class IndexLifecycleTransitionTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy newPolicy = new LifecyclePolicy("my-policy", phases); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), 2L, 2L); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(newPolicy, Map.of(), 2L, 2L); ClusterState existingState = ClusterState.builder(ClusterState.EMPTY_STATE) .metadata(Metadata.builder(Metadata.EMPTY_METADATA).put(meta, false).build()) @@ -1188,7 +1162,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { actions.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); actions.put("set_priority", new SetPriorityAction(100)); Phase hotPhase = new Phase("hot", TimeValue.ZERO, actions); - Map phases = Collections.singletonMap("hot", hotPhase); + Map phases = Map.of("hot", hotPhase); LifecyclePolicy currentPolicy = new LifecyclePolicy("my-policy", phases); { @@ -1198,10 +1172,10 @@ public class IndexLifecycleTransitionTests extends ESTestCase { Map actionsWithoutRollover = new HashMap<>(); 
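For readers skimming these test hunks: the change is the same mechanical swap everywhere, the pre-Java-9 Collections helpers (emptyMap, emptyList, singletonMap, singletonList) become the Map.of()/List.of() factories. Both families return unmodifiable collections, so the swap is behavior-preserving at these call sites; the one observable difference is that the of() factories reject nulls eagerly, and none of these fixtures pass null. A minimal standalone sketch, illustrative only and not part of this change:

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    class FactoryMethodDemo {
        public static void main(String[] args) {
            // Equal contents, both unmodifiable; of() is simply the modern spelling.
            System.out.println(Map.of().equals(Collections.emptyMap()));               // true
            System.out.println(List.of("a").equals(Collections.singletonList("a")));   // true

            // The one behavioral difference: of() rejects nulls at construction time,
            // while singletonList(null) is allowed.
            try {
                List.of((Object) null);
            } catch (NullPointerException expected) {
                System.out.println("List.of rejects null eagerly");
            }
        }
    }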
actionsWithoutRollover.put("set_priority", new SetPriorityAction(100)); Phase hotPhaseNoRollover = new Phase("hot", TimeValue.ZERO, actionsWithoutRollover); - Map phasesNoRollover = Collections.singletonMap("hot", hotPhaseNoRollover); + Map phasesNoRollover = Map.of("hot", hotPhaseNoRollover); LifecyclePolicyMetadata updatedPolicyMetadata = new LifecyclePolicyMetadata( new LifecyclePolicy("my-policy", phasesNoRollover), - Collections.emptyMap(), + Map.of(), 2L, 2L ); @@ -1236,10 +1210,10 @@ public class IndexLifecycleTransitionTests extends ESTestCase { Map actionsWitoutSetPriority = new HashMap<>(); actionsWitoutSetPriority.put("rollover", new RolloverAction(null, null, null, 1L, null, null, null, null, null, null)); Phase hotPhaseNoSetPriority = new Phase("hot", TimeValue.ZERO, actionsWitoutSetPriority); - Map phasesWithoutSetPriority = Collections.singletonMap("hot", hotPhaseNoSetPriority); + Map phasesWithoutSetPriority = Map.of("hot", hotPhaseNoSetPriority); LifecyclePolicyMetadata updatedPolicyMetadata = new LifecyclePolicyMetadata( new LifecyclePolicy("my-policy", phasesWithoutSetPriority), - Collections.emptyMap(), + Map.of(), 2L, 2L ); @@ -1278,7 +1252,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { assert unsafeStep == null || safeStep.phase().equals(unsafeStep.phase()) == false : "safe and unsafe actions must be in different phases"; Map actions = new HashMap<>(); - List steps = Collections.singletonList(new MockStep(safeStep, null)); + List steps = List.of(new MockStep(safeStep, null)); MockAction safeAction = new MockAction(steps, true); actions.put(safeAction.getWriteableName(), safeAction); Phase phase = new Phase(safeStep.phase(), TimeValue.timeValueMillis(0), actions); @@ -1287,7 +1261,7 @@ public class IndexLifecycleTransitionTests extends ESTestCase { if (unsafeStep != null) { assert MockAction.NAME.equals(unsafeStep.action()) : "The unsafe action needs to be MockAction.NAME"; Map actions = new HashMap<>(); - List steps = Collections.singletonList(new MockStep(unsafeStep, null)); + List steps = List.of(new MockStep(unsafeStep, null)); MockAction unsafeAction = new MockAction(steps, false); actions.put(unsafeAction.getWriteableName(), unsafeAction); Phase phase = new Phase(unsafeStep.phase(), TimeValue.timeValueMillis(0), actions); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java index 804451d1f357..0f5154b50d43 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToErrorStepUpdateTaskTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.ilm.Step; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.junit.Before; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.cluster.metadata.LifecycleExecutionState.ILM_CUSTOM_METADATA_KEY; import static org.hamcrest.Matchers.containsString; @@ -53,10 +53,7 @@ public class MoveToErrorStepUpdateTaskTests extends ESTestCase { .build(); index = indexMetadata.getIndex(); IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata( - Collections.singletonMap( - policy, - new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ), + Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), 
randomNonNegativeLong(), randomNonNegativeLong())), OperationMode.RUNNING ); Metadata metadata = Metadata.builder() diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java index 35dac4a2b169..0b2cbe30c551 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/MoveToNextStepUpdateTaskTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.junit.Before; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Map; @@ -67,10 +66,7 @@ public class MoveToNextStepUpdateTaskTests extends ESTestCase { index = indexMetadata.getIndex(); lifecyclePolicy = LifecyclePolicyTests.randomTestLifecyclePolicy(policy); IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata( - Collections.singletonMap( - policy, - new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ), + Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())), OperationMode.RUNNING ); Metadata metadata = Metadata.builder() @@ -95,7 +91,7 @@ public class MoveToNextStepUpdateTaskTests extends ESTestCase { AlwaysExistingStepRegistry stepRegistry = new AlwaysExistingStepRegistry(client); stepRegistry.update( new IndexLifecycleMetadata( - Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), 2L, 2L)), + Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), 2L, 2L)), OperationMode.RUNNING ) ); @@ -169,7 +165,7 @@ public class MoveToNextStepUpdateTaskTests extends ESTestCase { AlwaysExistingStepRegistry stepRegistry = new AlwaysExistingStepRegistry(client); stepRegistry.update( new IndexLifecycleMetadata( - Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Collections.emptyMap(), 2L, 2L)), + Map.of(policy, new LifecyclePolicyMetadata(lifecyclePolicy, Map.of(), 2L, 2L)), OperationMode.RUNNING ) ); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java index f1f8d095a219..5e3054e7c873 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/PolicyStepsRegistryTests.java @@ -46,7 +46,6 @@ import org.elasticsearch.xpack.core.ilm.ShrinkStep; import org.elasticsearch.xpack.core.ilm.Step; import org.mockito.Mockito; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -76,7 +75,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { public void testGetFirstStep() { String policyName = randomAlphaOfLengthBetween(2, 10); Step expectedFirstStep = new MockStep(MOCK_STEP_KEY, null); - Map firstStepMap = Collections.singletonMap(policyName, expectedFirstStep); + Map firstStepMap = Map.of(policyName, expectedFirstStep); PolicyStepsRegistry registry = new PolicyStepsRegistry(null, firstStepMap, null, NamedXContentRegistry.EMPTY, null, null); Step actualFirstStep = registry.getFirstStep(policyName); assertThat(actualFirstStep, sameInstance(expectedFirstStep)); @@ -85,7 +84,7 @@ public class 
PolicyStepsRegistryTests extends ESTestCase { public void testGetFirstStepUnknownPolicy() { String policyName = randomAlphaOfLengthBetween(2, 10); Step expectedFirstStep = new MockStep(MOCK_STEP_KEY, null); - Map firstStepMap = Collections.singletonMap(policyName, expectedFirstStep); + Map firstStepMap = Map.of(policyName, expectedFirstStep); PolicyStepsRegistry registry = new PolicyStepsRegistry(null, firstStepMap, null, NamedXContentRegistry.EMPTY, null, null); Step actualFirstStep = registry.getFirstStep(policyName + "unknown"); assertNull(actualFirstStep); @@ -95,7 +94,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { Client client = mock(Client.class); Mockito.when(client.settings()).thenReturn(Settings.EMPTY); LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicyWithAllPhases("policy"); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong()); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong()); String phaseName = randomFrom(policy.getPhases().keySet()); Phase phase = policy.getPhases().get(phaseName); PhaseExecutionInfo pei = new PhaseExecutionInfo(policy.getName(), phase, 1, randomNonNegativeLong()); @@ -119,7 +118,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { Step.StepKey errorStepKey = new Step.StepKey(randomAlphaOfLengthBetween(1, 10), randomAlphaOfLengthBetween(1, 10), ErrorStep.NAME); Step expectedStep = new ErrorStep(errorStepKey); Index index = new Index("test", "uuid"); - Map> indexSteps = Collections.singletonMap(index, Collections.singletonList(expectedStep)); + Map> indexSteps = Map.of(index, List.of(expectedStep)); PolicyStepsRegistry registry = new PolicyStepsRegistry(null, null, null, NamedXContentRegistry.EMPTY, null, null); Step actualStep = registry.getStep(emptyMetadata(index), errorStepKey); assertThat(actualStep, equalTo(expectedStep)); @@ -143,7 +142,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { Client client = mock(Client.class); Mockito.when(client.settings()).thenReturn(Settings.EMPTY); LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicy("policy"); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong()); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong()); IndexMetadata indexMetadata = IndexMetadata.builder("test") .settings(indexSettings(IndexVersion.current(), 1, 0).put(LifecycleSettings.LIFECYCLE_NAME, "policy").build()) .build(); @@ -158,7 +157,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { Client client = mock(Client.class); Mockito.when(client.settings()).thenReturn(Settings.EMPTY); LifecyclePolicy policy = LifecyclePolicyTests.randomTimeseriesLifecyclePolicyWithAllPhases("policy"); - LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong()); + LifecyclePolicyMetadata policyMetadata = new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong()); String phaseName = randomFrom(policy.getPhases().keySet()); Phase phase = policy.getPhases().get(phaseName); PhaseExecutionInfo pei = new PhaseExecutionInfo(policy.getName(), phase, 1, randomNonNegativeLong()); @@ -193,7 +192,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { headers.put(randomAlphaOfLength(10), 
randomAlphaOfLength(10)); headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); } - Map policyMap = Collections.singletonMap( + Map policyMap = Map.of( newPolicy.getName(), new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong()) ); @@ -271,7 +270,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { assertThat(registry.getStepMap(), equalTo(registryStepMap)); // remove policy - lifecycleMetadata = new IndexLifecycleMetadata(Collections.emptyMap(), OperationMode.RUNNING); + lifecycleMetadata = new IndexLifecycleMetadata(Map.of(), OperationMode.RUNNING); currentState = ClusterState.builder(currentState) .metadata(Metadata.builder(metadata).putCustom(IndexLifecycleMetadata.TYPE, lifecycleMetadata)) .build(); @@ -291,7 +290,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); } - Map policyMap = Collections.singletonMap( + Map policyMap = Map.of( newPolicy.getName(), new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong()) ); @@ -316,10 +315,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { // swap out policy newPolicy = LifecyclePolicyTests.randomTestLifecyclePolicy(policyName); lifecycleMetadata = new IndexLifecycleMetadata( - Collections.singletonMap( - policyName, - new LifecyclePolicyMetadata(newPolicy, Collections.emptyMap(), randomNonNegativeLong(), randomNonNegativeLong()) - ), + Map.of(policyName, new LifecyclePolicyMetadata(newPolicy, Map.of(), randomNonNegativeLong(), randomNonNegativeLong())), OperationMode.RUNNING ); currentState = ClusterState.builder(currentState) @@ -356,7 +352,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); headers.put(randomAlphaOfLength(10), randomAlphaOfLength(10)); } - Map policyMap = Collections.singletonMap( + Map policyMap = Map.of( newPolicy.getName(), new LifecyclePolicyMetadata(newPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong()) ); @@ -411,7 +407,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { assertThat(((ShrinkStep) gotStep).getNumberOfShards(), equalTo(1)); // Update the policy with the new policy, but keep the phase the same - policyMap = Collections.singletonMap( + policyMap = Map.of( updatedPolicy.getName(), new LifecyclePolicyMetadata(updatedPolicy, headers, randomNonNegativeLong(), randomNonNegativeLong()) ); @@ -457,7 +453,7 @@ public class PolicyStepsRegistryTests extends ESTestCase { .build(); SortedMap metas = new TreeMap<>(); - metas.put("policy", new LifecyclePolicyMetadata(policy, Collections.emptyMap(), 1, randomNonNegativeLong())); + metas.put("policy", new LifecyclePolicyMetadata(policy, Map.of(), 1, randomNonNegativeLong())); IndexLifecycleMetadata meta = new IndexLifecycleMetadata(metas, OperationMode.RUNNING); PolicyStepsRegistry registry = new PolicyStepsRegistry(REGISTRY, client, null); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java index be2d44935324..95412f92b615 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/StagnatingIndicesFinderTests.java @@ -28,7 +28,6 @@ import 
java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.LongSupplier; -import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -61,7 +60,7 @@ public class StagnatingIndicesFinderTests extends ESTestCase { assertEquals(expectedMaxTimeOnStep, maxTimeOnStep); assertEquals(expectedMaxRetriesPerStep, maxRetriesPerStep); return rc; - }).collect(Collectors.toList()); + }).toList(); // Per the evaluator, the timeSupplier _must_ be called only twice when(mockedTimeSupplier.getAsLong()).thenReturn(instant, instant); diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java index 8c0fede4c11d..bd0d63ebb0f3 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/action/TransportStopILMActionTests.java @@ -24,7 +24,8 @@ import org.elasticsearch.xpack.core.ilm.StopILMRequest; import org.elasticsearch.xpack.core.ilm.action.ILMActions; import org.mockito.ArgumentMatcher; -import static java.util.Collections.emptyMap; +import java.util.Map; + import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; @@ -50,7 +51,7 @@ public class TransportStopILMActionTests extends ESTestCase { ILMActions.STOP.name(), "description", new TaskId(randomLong() + ":" + randomLong()), - emptyMap() + Map.of() ); StopILMRequest request = new StopILMRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT); transportStopILMAction.masterOperation(task, request, ClusterState.EMPTY_STATE, ActionListener.noop()); diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 3b0fc869c812..c7b3a9d42f57 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.inference.SimilarityMeasure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; @@ -58,7 +59,7 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(Utils.TestInferencePlugin.class); + return Arrays.asList(LocalStateInferencePlugin.class); } public void testBulkOperations() throws Exception { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java 
b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java index be6b3725b0f3..d5c156d1d4f4 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests; import org.elasticsearch.xpack.inference.registry.ModelRegistry; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalModel; @@ -76,7 +76,7 @@ public class ModelRegistryIT extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return pluginList(ReindexPlugin.class, InferencePlugin.class); + return pluginList(ReindexPlugin.class, LocalStateInferencePlugin.class); } public void testStoreModel() throws Exception { diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 53974657e4e2..1c2240e8c521 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -34,6 +34,7 @@ module org.elasticsearch.inference { requires software.amazon.awssdk.retries.api; requires org.reactivestreams; requires org.elasticsearch.logging; + requires org.elasticsearch.sslconfig; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index 67892dfe7862..3b7613b8b0e1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.inference; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor; import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; import org.elasticsearch.xpack.inference.rank.textsimilarity.TextSimilarityRankRetrieverBuilder; @@ -43,7 +44,8 @@ public class InferenceFeatures implements FeatureSpecification { SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX, - SEMANTIC_TEXT_HIGHLIGHTER + SEMANTIC_TEXT_HIGHLIGHTER, + SemanticMatchQueryRewriteInterceptor.SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 
148a78445636..93743a5485c2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.node.PluginComponentBinding; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; @@ -35,6 +36,7 @@ import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; @@ -44,6 +46,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceEndpointAction; import org.elasticsearch.xpack.core.inference.action.GetInferenceDiagnosticsAction; @@ -53,6 +56,7 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; import org.elasticsearch.xpack.core.inference.action.UnifiedCompletionAction; import org.elasticsearch.xpack.core.inference.action.UpdateInferenceModelAction; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.action.TransportDeleteInferenceEndpointAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceDiagnosticsAction; import org.elasticsearch.xpack.inference.action.TransportGetInferenceModelAction; @@ -74,6 +78,7 @@ import org.elasticsearch.xpack.inference.highlight.SemanticTextHighlighter; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper; import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.queries.SemanticMatchQueryRewriteInterceptor; import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankBuilder; import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder; @@ -116,7 +121,6 @@ import java.util.List; import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; -import java.util.stream.Collectors; import java.util.stream.Stream; import static java.util.Collections.singletonList; @@ -150,6 +154,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); + private final SetOnce elasticInferenceServiceFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce 
elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); @@ -232,31 +237,31 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing - // internal names like "eis" or "gateway". - ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); - - String elasticInferenceUrl = null; - - if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); - } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - log.warn( - "Deprecated flag {} detected for enabling {}. Please use {}.", - ELASTIC_INFERENCE_SERVICE_IDENTIFIER, - DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, - ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + if (isElasticInferenceServiceEnabled()) { + // Create a separate instance of HttpClientManager with its own SSL configuration (`xpack.inference.elastic.http.ssl.*`). + var elasticInferenceServiceHttpClientManager = HttpClientManager.create( + settings, + services.threadPool(), + services.clusterService(), + throttlerManager, + getSslService() ); - elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); - } - if (elasticInferenceUrl != null) { + var elasticInferenceServiceRequestSenderFactory = new HttpRequestSender.Factory( + serviceComponents.get(), + elasticInferenceServiceHttpClientManager, + services.clusterService() + ); + elasticInferenceServiceFactory.set(elasticInferenceServiceRequestSenderFactory); + + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + String elasticInferenceUrl = this.getElasticInferenceServiceUrl(inferenceServiceSettings); elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( () -> List.of( context -> new ElasticInferenceService( - httpFactory.get(), + elasticInferenceServiceFactory.get(), serviceComponents.get(), elasticInferenceServiceComponents.get() ) @@ -379,16 +384,21 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP @Override public List> getSettings() { - return Stream.of( - HttpSettings.getSettingsDefinitions(), - HttpClientManager.getSettingsDefinitions(), - ThrottlerManager.getSettingsDefinitions(), - RetrySettings.getSettingsDefinitions(), - ElasticInferenceServiceSettings.getSettingsDefinitions(), - Truncator.getSettingsDefinitions(), - RequestExecutorServiceSettings.getSettingsDefinitions(), - List.of(SKIP_VALIDATE_AND_START) - ).flatMap(Collection::stream).collect(Collectors.toList()); + ArrayList> settings = new ArrayList<>(); + settings.addAll(HttpSettings.getSettingsDefinitions()); + settings.addAll(HttpClientManager.getSettingsDefinitions()); + settings.addAll(ThrottlerManager.getSettingsDefinitions()); + settings.addAll(RetrySettings.getSettingsDefinitions()); + settings.addAll(Truncator.getSettingsDefinitions()); + settings.addAll(RequestExecutorServiceSettings.getSettingsDefinitions()); + settings.add(SKIP_VALIDATE_AND_START); + + // Register Elastic Inference Service settings definitions if the corresponding feature flag is enabled. 
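The guarded block that follows registers the Elastic Inference Service settings only when one of the feature flags is set; on clusters without a flag the xpack.inference.elastic.* keys stay unregistered, and Elasticsearch rejects unregistered keys found in the node configuration at startup. A minimal sketch of the flag-gated registration pattern, with hypothetical names standing in for the plugin's real types:

    import java.util.ArrayList;
    import java.util.List;

    class GatedSettingsSketch {
        // Stand-in for a Setting<?> definition; illustrative only.
        record SettingKey(String key) {}

        // Stand-in for the feature-flag check (the real code consults FeatureFlag instances).
        static final boolean EIS_ENABLED = Boolean.getBoolean("sketch.eis.enabled");

        static List<SettingKey> getSettings() {
            List<SettingKey> settings = new ArrayList<>();
            settings.add(new SettingKey("xpack.inference.http.max_total_connections"));
            // Advertise the feature's settings only when the flag is set; a key that
            // is not advertised here is treated as unknown if an operator sets it.
            if (EIS_ENABLED) {
                settings.add(new SettingKey("xpack.inference.elastic.url"));
            }
            return settings;
        }

        public static void main(String[] args) {
            // Prints 1 by default, 2 when run with -Dsketch.eis.enabled=true.
            System.out.println(getSettings().size());
        }
    }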
+ if (isElasticInferenceServiceEnabled()) { + settings.addAll(ElasticInferenceServiceSettings.getSettingsDefinitions()); + } + + return settings; } @Override @@ -428,10 +438,18 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP return List.of(new QuerySpec<>(SemanticQueryBuilder.NAME, SemanticQueryBuilder::new, SemanticQueryBuilder::fromXContent)); } + @Override + public List getQueryRewriteInterceptors() { + return List.of(new SemanticMatchQueryRewriteInterceptor()); + } + @Override public List> getRetrievers() { return List.of( - new RetrieverSpec<>(new ParseField(TextSimilarityRankBuilder.NAME), TextSimilarityRankRetrieverBuilder::fromXContent), + new RetrieverSpec<>( + new ParseField(TextSimilarityRankBuilder.NAME), + (parser, context) -> TextSimilarityRankRetrieverBuilder.fromXContent(parser, context, getLicenseState()) + ), new RetrieverSpec<>(new ParseField(RandomRankBuilder.NAME), RandomRankRetrieverBuilder::fromXContent) ); } @@ -440,4 +458,36 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public Map getHighlighters() { return Map.of(SemanticTextHighlighter.NAME, new SemanticTextHighlighter()); } + + // Get Elastic Inference service URL based on feature flags to support transitioning + // to the new Elastic Inference Service URL. + private String getElasticInferenceServiceUrl(ElasticInferenceServiceSettings settings) { + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = settings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = settings.getEisGatewayUrl(); + } + + return elasticInferenceUrl; + } + + protected Boolean isElasticInferenceServiceEnabled() { + return (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()); + } + + protected SSLService getSslService() { + return XPackPlugin.getSharedSslService(); + } + + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java index b2d6c83b8921..bf28e30074a9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunker.java @@ -62,7 +62,8 @@ public class SentenceBoundaryChunker implements Chunker { * * @param input Text to chunk * @param maxNumberWordsPerChunk Maximum size of the chunk - * @return The input text chunked + * @param includePrecedingSentence Include the previous sentence + * @return The input text offsets */ public List chunk(String input, int maxNumberWordsPerChunk, boolean includePrecedingSentence) { var chunks = new ArrayList(); @@ -158,6 +159,11 @@ public class SentenceBoundaryChunker implements Chunker { chunks.add(new ChunkOffset(chunkStart, input.length())); } + if (chunks.isEmpty()) { + // The input did not chunk, return the entire input + chunks.add(new ChunkOffset(0, 
input.length())); + } + return chunks; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java index b15e2134f4cf..1ce90a9e416e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunker.java @@ -96,10 +96,6 @@ public class WordBoundaryChunker implements Chunker { throw new IllegalArgumentException("Invalid chunking parameters, overlap [" + overlap + "] must be >= 0"); } - if (input.isEmpty()) { - return List.of(); - } - var chunkPositions = new ArrayList(); // This position in the chunk is where the next overlapping chunk will start diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java index e5d76b9bb557..6d09c9e67b36 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/HttpClientManager.java @@ -7,9 +7,14 @@ package org.elasticsearch.xpack.inference.external.http; +import org.apache.http.config.Registry; +import org.apache.http.config.RegistryBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; +import org.apache.http.nio.conn.NoopIOSessionStrategy; +import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.reactor.ConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; import org.apache.http.pool.PoolStats; @@ -21,6 +26,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.Closeable; @@ -28,11 +34,13 @@ import java.io.IOException; import java.util.List; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX; public class HttpClientManager implements Closeable { private static final Logger logger = LogManager.getLogger(HttpClientManager.class); /** * The maximum number of total connections the connection pool can lease to all routes. + * The configuration applies to each instance of HttpClientManager (e.g. max_total_connections=10 across 5 instances allows up to 50 connections). * From googling around the connection pools maxTotal value should be close to the number of available threads. * * https://stackoverflow.com/questions/30989637/how-to-decide-optimal-settings-for-setmaxtotal-and-setdefaultmaxperroute @@ -47,6 +55,7 @@ public class HttpClientManager implements Closeable { /** * The max number of connections a single route can lease. + * This configuration applies to each instance of HttpClientManager. 
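+ * For example, two HttpClientManager instances that each allow 20 connections per route can together lease up to 40 connections to a single route.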
*/ public static final Setting MAX_ROUTE_CONNECTIONS = Setting.intSetting( "xpack.inference.http.max_route_connections", @@ -98,6 +107,22 @@ public class HttpClientManager implements Closeable { return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); } + public static HttpClientManager create( + Settings settings, + ThreadPool threadPool, + ClusterService clusterService, + ThrottlerManager throttlerManager, + SSLService sslService + ) { + // Set the sslStrategy to ensure an encrypted connection, as Elastic Inference Service requires it. + SSLIOSessionStrategy sslioSessionStrategy = sslService.sslIOSessionStrategy( + sslService.getSSLConfiguration(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX) + ); + + PoolingNHttpClientConnectionManager connectionManager = createConnectionManager(sslioSessionStrategy); + return new HttpClientManager(settings, connectionManager, threadPool, clusterService, throttlerManager); + } + // Default for testing HttpClientManager( Settings settings, @@ -121,6 +146,25 @@ public class HttpClientManager implements Closeable { this.addSettingsUpdateConsumers(clusterService); } + private static PoolingNHttpClientConnectionManager createConnectionManager(SSLIOSessionStrategy sslStrategy) { + ConnectingIOReactor ioReactor; + try { + var configBuilder = IOReactorConfig.custom().setSoKeepAlive(true); + ioReactor = new DefaultConnectingIOReactor(configBuilder.build()); + } catch (IOReactorException e) { + var message = "Failed to initialize HTTP client manager with SSL."; + logger.error(message, e); + throw new ElasticsearchException(message, e); + } + + Registry registry = RegistryBuilder.create() + .register("http", NoopIOSessionStrategy.INSTANCE) + .register("https", sslStrategy) + .build(); + + return new PoolingNHttpClientConnectionManager(ioReactor, registry); + } + private static PoolingNHttpClientConnectionManager createConnectionManager() { ConnectingIOReactor ioReactor; try { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java new file mode 100644 index 000000000000..a4a8123935c3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticMatchQueryRewriteInterceptor.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.queries; + +import org.elasticsearch.action.ResolvedIndices; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.mapper.IndexFieldMapper; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class SemanticMatchQueryRewriteInterceptor implements QueryRewriteInterceptor { + + public static final NodeFeature SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED = new NodeFeature( + "search.semantic_match_query_rewrite_interception_supported" + ); + + public SemanticMatchQueryRewriteInterceptor() {} + + @Override + public QueryBuilder interceptAndRewrite(QueryRewriteContext context, QueryBuilder queryBuilder) { + assert (queryBuilder instanceof MatchQueryBuilder); + MatchQueryBuilder matchQueryBuilder = (MatchQueryBuilder) queryBuilder; + QueryBuilder rewritten = queryBuilder; + ResolvedIndices resolvedIndices = context.getResolvedIndices(); + if (resolvedIndices != null) { + Collection indexMetadataCollection = resolvedIndices.getConcreteLocalIndicesMetadata().values(); + List inferenceIndices = new ArrayList<>(); + List nonInferenceIndices = new ArrayList<>(); + for (IndexMetadata indexMetadata : indexMetadataCollection) { + String indexName = indexMetadata.getIndex().getName(); + InferenceFieldMetadata inferenceFieldMetadata = indexMetadata.getInferenceFields().get(matchQueryBuilder.fieldName()); + if (inferenceFieldMetadata != null) { + inferenceIndices.add(indexName); + } else { + nonInferenceIndices.add(indexName); + } + } + + if (inferenceIndices.isEmpty()) { + return rewritten; + } else if (nonInferenceIndices.isEmpty() == false) { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + for (String inferenceIndexName : inferenceIndices) { + // Add a separate clause for each semantic query, because they may be using different inference endpoints + // TODO - consolidate this to a single clause once the semantic query supports multiple inference endpoints + boolQueryBuilder.should( + createSemanticSubQuery(inferenceIndexName, matchQueryBuilder.fieldName(), (String) matchQueryBuilder.value()) + ); + } + boolQueryBuilder.should(createMatchSubQuery(nonInferenceIndices, matchQueryBuilder)); + rewritten = boolQueryBuilder; + } else { + rewritten = new SemanticQueryBuilder(matchQueryBuilder.fieldName(), (String) matchQueryBuilder.value(), false); + } + } + + return rewritten; + + } + + @Override + public String getQueryName() { + return MatchQueryBuilder.NAME; + } + + private QueryBuilder createSemanticSubQuery(String indexName, String fieldName, String value) { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(new SemanticQueryBuilder(fieldName, value, true)); + boolQueryBuilder.filter(new TermQueryBuilder(IndexFieldMapper.NAME, indexName)); + return boolQueryBuilder; + } + + private QueryBuilder createMatchSubQuery(List indices, MatchQueryBuilder matchQueryBuilder) { + BoolQueryBuilder boolQueryBuilder = 
new BoolQueryBuilder(); + boolQueryBuilder.must(matchQueryBuilder); + boolQueryBuilder.filter(new TermsQueryBuilder(IndexFieldMapper.NAME, indices)); + return boolQueryBuilder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java index d648db2fbfdb..2a34651efcd9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilder.java @@ -46,6 +46,7 @@ import java.util.Map; import java.util.Objects; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -57,16 +58,18 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder PARSER = new ConstructingObjectParser<>( NAME, false, - args -> new SemanticQueryBuilder((String) args[0], (String) args[1]) + args -> new SemanticQueryBuilder((String) args[0], (String) args[1], (Boolean) args[2]) ); static { PARSER.declareString(constructorArg(), FIELD_FIELD); PARSER.declareString(constructorArg(), QUERY_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), LENIENT_FIELD); declareStandardFields(PARSER); } @@ -75,8 +78,13 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder inferenceResultsSupplier; private final InferenceResults inferenceResults; private final boolean noInferenceResults; + private final Boolean lenient; public SemanticQueryBuilder(String fieldName, String query) { + this(fieldName, query, null); + } + + public SemanticQueryBuilder(String fieldName, String query, Boolean lenient) { if (fieldName == null) { throw new IllegalArgumentException("[" + NAME + "] requires a " + FIELD_FIELD.getPreferredName() + " value"); } @@ -88,6 +96,7 @@ public class SemanticQueryBuilder extends AbstractQueryBuilder EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + public static final String ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX = "xpack.inference.elastic.http.ssl."; + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( "xpack.inference.elastic.url", Setting.Property.NodeScope @@ -31,11 +35,27 @@ public class ElasticInferenceServiceSettings { public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); - } + public static final SSLConfigurationSettings ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS = SSLConfigurationSettings.withPrefix( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX, + false + ); + + public static final Setting ELASTIC_INFERENCE_SERVICE_SSL_ENABLED = Setting.boolSetting( + ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX + "enabled", + true, + Setting.Property.NodeScope + ); + public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); + ArrayList> settings = new ArrayList<>(); + settings.add(EIS_GATEWAY_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_URL); + settings.add(ELASTIC_INFERENCE_SERVICE_SSL_ENABLED); + 
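These keys pair with the HttpClientManager.create(...) overload added earlier in this diff: the same xpack.inference.elastic.http.ssl. prefix is handed to SSLService, and the resulting strategy is installed for the https scheme only. A self-contained sketch of the prefix-scoping idea behind SSLConfigurationSettings.withPrefix, with plain maps standing in for the real Settings and SSLService types:

    import java.util.Map;
    import java.util.stream.Collectors;

    class PrefixScopedSettingsSketch {
        static final String PREFIX = "xpack.inference.elastic.http.ssl.";

        // Everything under the prefix forms one SSL configuration group; the keys
        // used here are illustrative, not an exhaustive list of the real SSL settings.
        static Map<String, String> scoped(Map<String, String> nodeSettings) {
            return nodeSettings.entrySet().stream()
                .filter(e -> e.getKey().startsWith(PREFIX))
                .collect(Collectors.toMap(e -> e.getKey().substring(PREFIX.length()), Map.Entry::getValue));
        }

        public static void main(String[] args) {
            var node = Map.of(
                PREFIX + "verification_mode", "full",
                "xpack.inference.http.max_total_connections", "50"
            );
            System.out.println(scoped(node)); // {verification_mode=full}
        }
    }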
settings.addAll(ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_SETTINGS.getEnabledSettings()); + + return settings; } @Deprecated diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index 54aa72f3e926..92ed0b610322 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.inference.InferencePlugin; import org.hamcrest.Matchers; @@ -28,7 +29,7 @@ public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class); + return List.of(XPackPlugin.class, InferencePlugin.class); } public void testCreateIndexWithSemanticTextField() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java new file mode 100644 index 000000000000..d1db5b8b12cc --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/InferencePluginTests.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; +import org.junit.After; +import org.junit.Before; + +import static org.hamcrest.Matchers.is; + +public class InferencePluginTests extends ESTestCase { + private InferencePlugin inferencePlugin; + + private Boolean elasticInferenceServiceEnabled = true; + + private void setElasticInferenceServiceEnabled(Boolean elasticInferenceServiceEnabled) { + this.elasticInferenceServiceEnabled = elasticInferenceServiceEnabled; + } + + @Before + public void setUp() throws Exception { + super.setUp(); + + Settings settings = Settings.builder().build(); + inferencePlugin = new InferencePlugin(settings) { + @Override + protected Boolean isElasticInferenceServiceEnabled() { + return elasticInferenceServiceEnabled; + } + }; + } + + @After + public void tearDown() throws Exception { + super.tearDown(); + } + + public void testElasticInferenceServiceSettingsPresent() throws Exception { + setElasticInferenceServiceEnabled(true); // enable elastic inference service + boolean anyMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .anyMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are present", anyMatch, is(true)); + } + + public void testElasticInferenceServiceSettingsNotPresent() throws Exception { + setElasticInferenceServiceEnabled(false); // disable elastic inference service + boolean noneMatch = inferencePlugin.getSettings() + .stream() + .map(Setting::getKey) + .noneMatch(key -> key.startsWith(ElasticInferenceServiceSettings.ELASTIC_INFERENCE_SERVICE_SSL_CONFIGURATION_PREFIX)); + + assertThat("xpack.inference.elastic settings are not present", noneMatch, is(true)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java new file mode 100644 index 000000000000..68ea175bd987 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/LocalStateInferencePlugin.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.inference.mock.TestDenseInferenceServiceExtension; +import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; + +import java.nio.file.Path; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +import static java.util.stream.Collectors.toList; + +public class LocalStateInferencePlugin extends LocalStateCompositeXPackPlugin { + private final InferencePlugin inferencePlugin; + + public LocalStateInferencePlugin(final Settings settings, final Path configPath) throws Exception { + super(settings, configPath); + LocalStateInferencePlugin thisVar = this; + this.inferencePlugin = new InferencePlugin(settings) { + @Override + protected SSLService getSslService() { + return thisVar.getSslService(); + } + + @Override + protected XPackLicenseState getLicenseState() { + return thisVar.getLicenseState(); + } + + @Override + public List getInferenceServiceFactories() { + return List.of( + TestSparseInferenceServiceExtension.TestInferenceService::new, + TestDenseInferenceServiceExtension.TestInferenceService::new + ); + } + }; + plugins.add(inferencePlugin); + } + + @Override + public List> getRetrievers() { + return this.filterPlugins(SearchPlugin.class).stream().flatMap(p -> p.getRetrievers().stream()).collect(toList()); + } + + @Override + public Map getMappers() { + return inferencePlugin.getMappers(); + } + + @Override + public Collection getMappedActionFilters() { + return inferencePlugin.getMappedActionFilters(); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java index 9395ae222e9b..0f322e64755b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; -import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -143,20 +142,6 @@ public final class Utils { latch.await(); } - public static class TestInferencePlugin extends InferencePlugin { - public TestInferencePlugin(Settings settings) { - super(settings); - } - - @Override - public List getInferenceServiceFactories() { - return List.of( - TestSparseInferenceServiceExtension.TestInferenceService::new, - TestDenseInferenceServiceExtension.TestInferenceService::new - ); - } - } - public static Model getInvalidModel(String inferenceEntityId, String serviceName) { var mockConfigs = mock(ModelConfigurations.class); when(mockConfigs.getInferenceEntityId()).thenReturn(inferenceEntityId); diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java index 2416aeb62ff3..c68a629b999c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -102,7 +102,7 @@ public class ShardBulkInferenceActionFilterTests extends ESTestCase { new BulkItemRequest[0] ); request.setInferenceFieldMap( - Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false))) + Map.of("foo", new InferenceFieldMetadata("foo", "bar", "baz", generateRandomStringArray(5, 10, false, false))) ); filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index a82d2f474ca4..dec7d15760aa 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingByte import org.elasticsearch.xpack.core.inference.results.InferenceTextEmbeddingFloatResults; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; +import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.List; @@ -31,16 +32,62 @@ import static org.hamcrest.Matchers.startsWith; public class EmbeddingRequestChunkerTests extends ESTestCase { - public void testEmptyInput() { + public void testEmptyInput_WordChunker() { var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); var batches = new EmbeddingRequestChunker(List.of(), 100, 100, 10, embeddingType).batchRequestsWithListeners(testListener()); assertThat(batches, empty()); } - public void testBlankInput() { + public void testEmptyInput_SentenceChunker() { + var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); + var batches = new EmbeddingRequestChunker(List.of(), 10, embeddingType, new SentenceBoundaryChunkingSettings(250, 1)) + .batchRequestsWithListeners(testListener()); + assertThat(batches, empty()); + } + + public void testWhitespaceInput_SentenceChunker() { + var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); + var batches = new EmbeddingRequestChunker(List.of(" "), 10, embeddingType, new SentenceBoundaryChunkingSettings(250, 1)) + .batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), hasSize(1)); + assertThat(batches.get(0).batch().inputs().get(0), Matchers.is(" ")); + } + + public void testBlankInput_WordChunker() { var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); var batches = new EmbeddingRequestChunker(List.of(""), 100, 100, 10, 
embeddingType).batchRequestsWithListeners(testListener()); assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), hasSize(1)); + assertThat(batches.get(0).batch().inputs().get(0), Matchers.is("")); + } + + public void testBlankInput_SentenceChunker() { + var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); + var batches = new EmbeddingRequestChunker(List.of(""), 10, embeddingType, new SentenceBoundaryChunkingSettings(250, 1)) + .batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), hasSize(1)); + assertThat(batches.get(0).batch().inputs().get(0), Matchers.is("")); + } + + public void testInputThatDoesNotChunk_WordChunker() { + var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); + var batches = new EmbeddingRequestChunker(List.of("ABBAABBA"), 100, 100, 10, embeddingType).batchRequestsWithListeners( + testListener() + ); + assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), hasSize(1)); + assertThat(batches.get(0).batch().inputs().get(0), Matchers.is("ABBAABBA")); + } + + public void testInputThatDoesNotChunk_SentenceChunker() { + var embeddingType = randomFrom(EmbeddingRequestChunker.EmbeddingType.values()); + var batches = new EmbeddingRequestChunker(List.of("ABBAABBA"), 10, embeddingType, new SentenceBoundaryChunkingSettings(250, 1)) + .batchRequestsWithListeners(testListener()); + assertThat(batches, hasSize(1)); + assertThat(batches.get(0).batch().inputs(), hasSize(1)); + assertThat(batches.get(0).batch().inputs().get(0), Matchers.is("ABBAABBA")); } public void testShortInputsAreSingleBatch() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java index de943f7f57ab..f81894ccd4bb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/SentenceBoundaryChunkerTests.java @@ -43,6 +43,41 @@ public class SentenceBoundaryChunkerTests extends ESTestCase { return chunkPositions.stream().map(offset -> input.substring(offset.start(), offset.end())).collect(Collectors.toList()); } + public void testEmptyString() { + var chunks = textChunks(new SentenceBoundaryChunker(), "", 100, randomBoolean()); + assertThat(chunks, hasSize(1)); + assertThat(chunks.get(0), Matchers.is("")); + } + + public void testBlankString() { + var chunks = textChunks(new SentenceBoundaryChunker(), " ", 100, randomBoolean()); + assertThat(chunks, hasSize(1)); + assertThat(chunks.get(0), Matchers.is(" ")); + } + + public void testSingleChar() { + var chunks = textChunks(new SentenceBoundaryChunker(), " b", 100, randomBoolean()); + assertThat(chunks, Matchers.contains(" b")); + + chunks = textChunks(new SentenceBoundaryChunker(), "b", 100, randomBoolean()); + assertThat(chunks, Matchers.contains("b")); + + chunks = textChunks(new SentenceBoundaryChunker(), ". ", 100, randomBoolean()); + assertThat(chunks, Matchers.contains(". 
")); + + chunks = textChunks(new SentenceBoundaryChunker(), " , ", 100, randomBoolean()); + assertThat(chunks, Matchers.contains(" , ")); + + chunks = textChunks(new SentenceBoundaryChunker(), " ,", 100, randomBoolean()); + assertThat(chunks, Matchers.contains(" ,")); + } + + public void testSingleCharRepeated() { + var input = "a".repeat(32_000); + var chunks = textChunks(new SentenceBoundaryChunker(), input, 100, randomBoolean()); + assertThat(chunks, Matchers.contains(input)); + } + public void testChunkSplitLargeChunkSizes() { for (int maxWordsPerChunk : new int[] { 100, 200 }) { var chunker = new SentenceBoundaryChunker(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java index 2ef28f2cf2e7..b4fa5c912225 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/WordBoundaryChunkerTests.java @@ -11,6 +11,7 @@ import com.ibm.icu.text.BreakIterator; import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; import java.util.List; import java.util.Locale; @@ -71,10 +72,6 @@ public class WordBoundaryChunkerTests extends ESTestCase { * Use the chunk functions that return offsets where possible */ List textChunks(WordBoundaryChunker chunker, String input, int chunkSize, int overlap) { - if (input.isEmpty()) { - return List.of(""); - } - var chunkPositions = chunker.chunk(input, chunkSize, overlap); return chunkPositions.stream().map(p -> input.substring(p.start(), p.end())).collect(Collectors.toList()); } @@ -240,6 +237,35 @@ public class WordBoundaryChunkerTests extends ESTestCase { assertThat(chunks, contains(" ")); } + public void testBlankString() { + var chunks = textChunks(new WordBoundaryChunker(), " ", 100, 10); + assertThat(chunks, hasSize(1)); + assertThat(chunks.get(0), Matchers.is(" ")); + } + + public void testSingleChar() { + var chunks = textChunks(new WordBoundaryChunker(), " b", 100, 10); + assertThat(chunks, Matchers.contains(" b")); + + chunks = textChunks(new WordBoundaryChunker(), "b", 100, 10); + assertThat(chunks, Matchers.contains("b")); + + chunks = textChunks(new WordBoundaryChunker(), ". ", 100, 10); + assertThat(chunks, Matchers.contains(". 
")); + + chunks = textChunks(new WordBoundaryChunker(), " , ", 100, 10); + assertThat(chunks, Matchers.contains(" , ")); + + chunks = textChunks(new WordBoundaryChunker(), " ,", 100, 10); + assertThat(chunks, Matchers.contains(" ,")); + } + + public void testSingleCharRepeated() { + var input = "a".repeat(32_000); + var chunks = textChunks(new WordBoundaryChunker(), input, 100, 10); + assertThat(chunks, Matchers.contains(input)); + } + public void testPunctuation() { int chunkSize = 1; var chunks = textChunks(new WordBoundaryChunker(), "Comma, separated", chunkSize, 0); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java index 1f58c4165056..24183b21f73e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextNonDynamicFieldMapperTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.xpack.inference.mapper; import org.elasticsearch.index.mapper.NonDynamicFieldMapperTests; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.elasticsearch.xpack.inference.Utils; import org.elasticsearch.xpack.inference.mock.TestSparseInferenceServiceExtension; import org.junit.Before; @@ -26,7 +27,7 @@ public class SemanticTextNonDynamicFieldMapperTests extends NonDynamicFieldMappe @Override protected Collection> getPlugins() { - return List.of(Utils.TestInferencePlugin.class); + return List.of(LocalStateInferencePlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java index 6d6403b69ea1..daed03c198e0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankMultiNodeTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.xpack.inference.rank.textsimilarity; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.rank.RankBuilder; import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import java.util.Collection; import java.util.List; @@ -40,7 +40,7 @@ public class TextSimilarityRankMultiNodeTests extends AbstractRerankerIT { @Override protected Collection> pluginsNeeded() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } public void testQueryPhaseShardThrowingAllShardsFail() throws Exception { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java index 084a7f3de4a5..ba6924ba0ff3 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankRetrieverTelemetryTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.XPackPlugin; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.io.IOException; @@ -47,7 +46,7 @@ public class TextSimilarityRankRetrieverTelemetryTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return List.of(InferencePlugin.class, XPackPlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java index a042fca44fdb..f81f2965c392 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/rank/textsimilarity/TextSimilarityRankTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.rank.rerank.AbstractRerankerIT; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.xpack.core.inference.action.InferenceAction; -import org.elasticsearch.xpack.inference.InferencePlugin; +import org.elasticsearch.xpack.inference.LocalStateInferencePlugin; import org.junit.Before; import java.util.Collection; @@ -108,7 +108,7 @@ public class TextSimilarityRankTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return List.of(InferencePlugin.class, TextSimilarityTestPlugin.class); + return List.of(LocalStateInferencePlugin.class, TextSimilarityTestPlugin.class); } @Before diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml index c2704a4c2291..3d3790d879ef 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/40_semantic_text_query.yml @@ -101,7 +101,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -132,7 +132,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: [40, 49.678] + inference_field: [ 40, 49.678 ] refresh: true - do: @@ -229,7 +229,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -260,7 
+260,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: [45.1, 100] + inference_field: [ 45.1, 100 ] refresh: true - do: @@ -387,7 +387,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -418,7 +418,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -440,7 +440,7 @@ setup: - match: { hits.hits.0._id: "doc_1" } - close_to: { hits.hits.0._score: { value: 3.783733e19, error: 1e13 } } - length: { hits.hits.0._source.inference_field.inference.chunks: 2 } - - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0.matched_queries: [ "i-like-naming-my-queries" ] } --- "Query an index alias": @@ -452,7 +452,7 @@ setup: index: test-sparse-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -503,6 +503,48 @@ setup: - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.root_cause.0.reason: "Field [non_inference_field] of type [text] does not support semantic queries" } +--- +"Query the wrong field type with lenient: true": + - requires: + cluster_features: "search.semantic_match_query_rewrite_interception_supported" + reason: lenient introduced in 8.18.0 + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: "inference test" + non_inference_field: "non inference test" + refresh: true + + - do: + catch: bad_request + search: + index: test-sparse-index + body: + query: + semantic: + field: "non_inference_field" + query: "inference test" + + - match: { error.type: "search_phase_execution_exception" } + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "Field [non_inference_field] of type [text] does not support semantic queries" } + + - do: + search: + index: test-sparse-index + body: + query: + semantic: + field: "non_inference_field" + query: "inference test" + lenient: true + + - match: { hits.total.value: 0 } + + --- "Query a missing field": - do: @@ -783,7 +825,7 @@ setup: index: test-dense-index id: doc_1 body: - inference_field: ["inference test", "another inference test"] + inference_field: [ "inference test", "another inference test" ] non_inference_field: "non inference test" refresh: true @@ -844,11 +886,11 @@ setup: "Query a field that uses the default ELSER 2 endpoint": - requires: reason: "default ELSER 2 inference ID is enabled via a capability" - test_runner_features: [capabilities] + test_runner_features: [ capabilities ] capabilities: - method: GET path: /_inference - capabilities: [default_elser_2] + capabilities: [ default_elser_2 ] - do: indices.create: diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml new file mode 100644 index 000000000000..cdbf73d31a27 --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/45_semantic_text_match.yml @@ -0,0 +1,284 @@ +setup: 
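+  # The steps below register two sparse endpoints and one dense endpoint against the mock
+  # inference services, then create a semantic_text index per endpoint plus a text-only
+  # index that reuses the same field name, so the match queries in this file can cover
+  # sparse, dense, and purely lexical mappings.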
+ - requires: + cluster_features: "search.semantic_match_query_rewrite_interception_supported" + reason: semantic_text match support introduced in 8.18.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id-2 + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "api_key": "abc64", + "similarity": "COSINE" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-sparse-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + + - do: + indices.create: + index: test-dense-index + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + indices.create: + index: test-text-only-index + body: + mappings: + properties: + inference_field: + type: text + non_inference_field: + type: text + +--- +"Query using a sparse embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query using a dense embedding model": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-dense-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query an index alias": + - skip: + features: [ "headers", "close_to" ] + + - do: + index: + index: test-sparse-index + id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + indices.put_alias: + index: test-sparse-index + name: my-alias + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: my-alias + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 1 } + - match: { hits.hits.0._id: "doc_1" } + +--- +"Query indices with both semantic_text and regular text content": + + - do: + index: + index: test-sparse-index + 
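+      # This test indexes the same field name into a semantic_text index and a plain text
+      # index, then expects a single match query to span both; the hit ordering asserted
+      # below (doc_1, doc_3, doc_2) has the two semantic matches scoring above the lexical one.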
id: doc_1 + body: + inference_field: [ "inference test", "another inference test" ] + non_inference_field: "non inference test" + refresh: true + + - do: + index: + index: test-text-only-index + id: doc_2 + body: + inference_field: [ "inference test", "not an inference field" ] + non_inference_field: "non inference test" + refresh: true + + - do: + search: + index: + - test-sparse-index + - test-text-only-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 2 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_2" } + + # Test querying multiple indices that either use the same inference ID or combine semantic_text with lexical search + - do: + indices.create: + index: test-sparse-index-2 + body: + mappings: + properties: + inference_field: + type: semantic_text + inference_id: sparse-inference-id + non_inference_field: + type: text + + - do: + index: + index: test-sparse-index-2 + id: doc_3 + body: + inference_field: "another inference test" + refresh: true + + - do: + search: + index: + - test-sparse-index* + - test-text-only-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 3 } + - match: { hits.hits.0._id: "doc_1" } + - match: { hits.hits.1._id: "doc_3" } + - match: { hits.hits.2._id: "doc_2" } + +--- +"Query a field that has no indexed inference results": + - skip: + features: [ "headers" ] + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-sparse-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 0 } + + - do: + headers: + # Force JSON content type so that we use a parser that interprets the floating-point score as a double + Content-Type: application/json + search: + index: test-dense-index + body: + query: + match: + inference_field: + query: "inference test" + + - match: { hits.total.value: 0 } diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java index 7f2243ed7684..6e24e644cb2a 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java @@ -117,6 +117,23 @@ public class ReindexDataStreamTransportActionIT extends ESIntegTestCase { assertThat(status.totalIndices(), equalTo(backingIndexCount)); assertThat(status.totalIndicesToBeUpgraded(), equalTo(0)); }); + AcknowledgedResponse cancelResponse = client().execute( + CancelReindexDataStreamAction.INSTANCE, + new CancelReindexDataStreamAction.Request(dataStreamName) + ).actionGet(); + assertNotNull(cancelResponse); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(CancelReindexDataStreamAction.INSTANCE, new CancelReindexDataStreamAction.Request(dataStreamName)) + .actionGet() + ); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute( + new ActionType(GetMigrationReindexStatusAction.NAME), + new GetMigrationReindexStatusAction.Request(dataStreamName) + ).actionGet() + ); } private int createDataStream(String dataStreamName) { diff 
--git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java index 1af66a2c61d5..26f8e57102a4 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/MigratePlugin.java @@ -32,10 +32,13 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamTransportAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusAction; import org.elasticsearch.xpack.migrate.action.GetMigrationReindexStatusTransportAction; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamTransportAction; +import org.elasticsearch.xpack.migrate.rest.RestCancelReindexDataStreamAction; import org.elasticsearch.xpack.migrate.rest.RestGetMigrationReindexStatusAction; import org.elasticsearch.xpack.migrate.rest.RestMigrationReindexAction; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamPersistentTaskExecutor; @@ -69,6 +72,7 @@ public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTas if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { handlers.add(new RestMigrationReindexAction()); handlers.add(new RestGetMigrationReindexStatusAction()); + handlers.add(new RestCancelReindexDataStreamAction()); } return handlers; } @@ -79,6 +83,7 @@ public class MigratePlugin extends Plugin implements ActionPlugin, PersistentTas if (REINDEX_DATA_STREAM_FEATURE_FLAG.isEnabled()) { actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); actions.add(new ActionHandler<>(GetMigrationReindexStatusAction.INSTANCE, GetMigrationReindexStatusTransportAction.class)); + actions.add(new ActionHandler<>(CancelReindexDataStreamAction.INSTANCE, CancelReindexDataStreamTransportAction.class)); } return actions; } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java new file mode 100644 index 000000000000..635d8b8f3097 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamAction.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +public class CancelReindexDataStreamAction extends ActionType<AcknowledgedResponse> { + + public static final CancelReindexDataStreamAction INSTANCE = new CancelReindexDataStreamAction(); + public static final String NAME = "indices:admin/data_stream/reindex_cancel"; + + public CancelReindexDataStreamAction() { + super(NAME); + } + + public static class Request extends ActionRequest implements IndicesRequest { + private final String index; + + public Request(String index) { + super(); + this.index = index; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.index = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(index); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean getShouldStoreResult() { + return true; + } + + public String getIndex() { + return index; + } + + @Override + public int hashCode() { + return Objects.hashCode(index); + } + + @Override + public boolean equals(Object other) { + return other instanceof Request && index.equals(((Request) other).index); + } + + public Request nodeRequest(String thisNodeId, long thisTaskId) { + Request copy = new Request(index); + copy.setParentTask(thisNodeId, thisTaskId); + return copy; + } + + @Override + public String[] indices() { + return new String[] { index }; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java new file mode 100644 index 000000000000..00a846bf7eb9 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamTransportAction.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction.Request; + +public class CancelReindexDataStreamTransportAction extends HandledTransportAction<Request, AcknowledgedResponse> { + private final PersistentTasksService persistentTasksService; + + @Inject + public CancelReindexDataStreamTransportAction( + TransportService transportService, + ActionFilters actionFilters, + PersistentTasksService persistentTasksService + ) { + super(CancelReindexDataStreamAction.NAME, transportService, actionFilters, Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.persistentTasksService = persistentTasksService; + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<AcknowledgedResponse> listener) { + String index = request.getIndex(); + String persistentTaskId = ReindexDataStreamAction.TASK_ID_PREFIX + index; + /* + * This removes the persistent task from the cluster state and results in the running task being cancelled (but not removed from + * the task manager). The running task is removed from the task manager in ReindexDataStreamTask::onCancelled, which is called as + * a result of this. + */ + persistentTasksService.sendRemoveRequest(persistentTaskId, TimeValue.MAX_VALUE, new ActionListener<>() { + @Override + public void onResponse(PersistentTasksCustomMetadata.PersistentTask<?> persistentTask) { + listener.onResponse(AcknowledgedResponse.TRUE); + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java new file mode 100644 index 000000000000..0bd68e8b2df7 --- /dev/null +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestCancelReindexDataStreamAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.migrate.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestCancelReindexDataStreamAction extends BaseRestHandler { + + @Override + public String getName() { + return "cancel_reindex_data_stream_action"; + } + + @Override + public List<Route> routes() { + return List.of(new Route(POST, "/_migration/reindex/{index}/_cancel")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + String index = request.param("index"); + CancelReindexDataStreamAction.Request cancelTaskRequest = new CancelReindexDataStreamAction.Request(index); + return channel -> client.execute(CancelReindexDataStreamAction.INSTANCE, cancelTaskRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java index 65d959f04750..edd6caf157be 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamPersistentTaskExecutor.java @@ -91,13 +91,11 @@ public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExec } private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { - persistentTask.allReindexesCompleted(); - threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic()); + persistentTask.allReindexesCompleted(threadPool, getTimeToLive(persistentTask)); } private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) { - persistentTask.taskFailed(e); - threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic()); + persistentTask.taskFailed(threadPool, getTimeToLive(persistentTask), e); } private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) { diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java index 72ddb87e9dea..844f24f45ab7 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/task/ReindexDataStreamTask.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.migrate.task; +import org.elasticsearch.common.util.concurrent.RunOnce; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.List; @@ -21,12 +24,14 @@ public class ReindexDataStreamTask extends AllocatedPersistentTask { private final long persistentTaskStartTime; private final int
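+ // The volatile/final field changes below, combined with the RunOnce guard, let the
+ // completion callback fire safely from either the scheduled TTL expiry or the
+ // cancellation path in onCancelled, whichever happens first, without ever running twice.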
totalIndices; private final int totalIndicesToBeUpgraded; - private boolean complete = false; - private Exception exception; - private AtomicInteger inProgress = new AtomicInteger(0); - private AtomicInteger pending = new AtomicInteger(); - private List<Tuple<String, Exception>> errors = new ArrayList<>(); + private volatile boolean complete = false; + private volatile Exception exception; + private final AtomicInteger inProgress = new AtomicInteger(0); + private final AtomicInteger pending = new AtomicInteger(); + private final List<Tuple<String, Exception>> errors = new ArrayList<>(); + private final RunOnce completeTask; + @SuppressWarnings("this-escape") public ReindexDataStreamTask( long persistentTaskStartTime, int totalIndices, @@ -42,6 +47,13 @@ public class ReindexDataStreamTask extends AllocatedPersistentTask { this.persistentTaskStartTime = persistentTaskStartTime; this.totalIndices = totalIndices; this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded; + this.completeTask = new RunOnce(() -> { + if (exception == null) { + markAsCompleted(); + } else { + markAsFailed(exception); + } + }); } @Override @@ -58,13 +70,18 @@ public class ReindexDataStreamTask extends AllocatedPersistentTask { ); } - public void allReindexesCompleted() { + public void allReindexesCompleted(ThreadPool threadPool, TimeValue timeToLive) { this.complete = true; + if (isCancelled()) { + completeTask.run(); + } else { + threadPool.schedule(completeTask, timeToLive, threadPool.generic()); + } } - public void taskFailed(Exception e) { - this.complete = true; + public void taskFailed(ThreadPool threadPool, TimeValue timeToLive, Exception e) { this.exception = e; + allReindexesCompleted(threadPool, timeToLive); } public void reindexSucceeded() { @@ -84,4 +101,16 @@ public class ReindexDataStreamTask extends AllocatedPersistentTask { public void setPendingIndicesCount(int size) { pending.set(size); } + + @Override + public void onCancelled() { + /* + * If the task is complete, but just waiting for its scheduled removal, we go ahead and call markAsCompleted/markAsFailed + * immediately. This results in the running task being removed from the task manager. If the task is not complete, then one of + * allReindexesCompleted or taskFailed will be called in the future, resulting in the same thing. + */ + if (complete) { + completeTask.run(); + } + } } diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java new file mode 100644 index 000000000000..187561dae19b --- /dev/null +++ b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/CancelReindexDataStreamRequestTests.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.migrate.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.migrate.action.CancelReindexDataStreamAction.Request; + +import java.io.IOException; + +public class CancelReindexDataStreamRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return Request::new; + } + + @Override + protected Request createTestInstance() { + return new Request(randomAlphaOfLength(30)); + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + return new Request(instance.getIndex() + randomAlphaOfLength(5)); + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java index 4e92cad1026a..04f349d67d7f 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PyTorchModelIT.java @@ -1142,6 +1142,22 @@ public class PyTorchModelIT extends PyTorchModelRestTestCase { } } + public void testInferEmptyInput() throws IOException { + String modelId = "empty_input"; + createPassThroughModel(modelId); + putModelDefinition(modelId); + putVocabulary(List.of("these", "are", "my", "words"), modelId); + startDeployment(modelId); + + Request request = new Request("POST", "/_ml/trained_models/" + modelId + "/_infer?timeout=30s"); + request.setJsonEntity(""" + { "docs": [] } + """); + + var inferenceResponse = client().performRequest(request); + assertThat(EntityUtils.toString(inferenceResponse.getEntity()), equalTo("{\"inference_results\":[]}")); + } + private void putModelDefinition(String modelId) throws IOException { putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index e0405b174953..20a4ceeae59b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -132,6 +132,11 @@ public class TransportInternalInferModelAction extends HandledTransportAction ssb.parseXContent(parser, true, nf -> true) - .rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")))) + .rewrite( + new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null) + ) ); assertEquals("[search_after] cannot be used in children of compound retrievers", iae.getMessage()); } @@ -70,7 +72,9 @@ public class RRFRetrieverBuilderTests extends ESTestCase { IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> ssb.parseXContent(parser, true, nf -> true) - .rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")))) + .rewrite( + new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null) + 
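+ // The trailing null is the only change to these assertions: QueryRewriteContext gained a
+ // sixth constructor argument in this change (presumably the rewrite-interception hook used
+ // by the semantic match work elsewhere in this diff), and these tests simply opt out of it.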
) ); assertEquals("[terminate_after] cannot be used in children of compound retrievers", iae.getMessage()); } diff --git a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java index ab5be0f48f5f..057ebdece5c6 100644 --- a/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java +++ b/x-pack/plugin/security/qa/consistency-checks/src/test/java/org/elasticsearch/xpack/security/CrossClusterShardTests.java @@ -35,7 +35,6 @@ import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.frozen.FrozenIndices; import org.elasticsearch.xpack.graph.Graph; import org.elasticsearch.xpack.ilm.IndexLifecycle; -import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.profiling.ProfilingPlugin; import org.elasticsearch.xpack.rollup.Rollup; import org.elasticsearch.xpack.search.AsyncSearch; @@ -89,7 +88,6 @@ public class CrossClusterShardTests extends ESSingleNodeTestCase { FrozenIndices.class, Graph.class, IndexLifecycle.class, - InferencePlugin.class, IngestCommonPlugin.class, IngestTestPlugin.class, MustachePlugin.class, diff --git a/x-pack/plugin/security/qa/multi-cluster/build.gradle b/x-pack/plugin/security/qa/multi-cluster/build.gradle index 5b682cfdccad..f4eee4ef46c0 100644 --- a/x-pack/plugin/security/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/security/qa/multi-cluster/build.gradle @@ -24,6 +24,7 @@ dependencies { clusterModules project(':x-pack:plugin:enrich') clusterModules project(':x-pack:plugin:autoscaling') clusterModules project(':x-pack:plugin:ml') + clusterModules project(xpackModule('ilm')) clusterModules(project(":modules:ingest-common")) } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs1IT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs1IT.java new file mode 100644 index 000000000000..57eb583912c4 --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs1IT.java @@ -0,0 +1,402 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.Build; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.MapMatcher; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; + +// TODO consolidate me with RemoteClusterSecurityDataStreamEsqlRcs2IT +public class RemoteClusterSecurityDataStreamEsqlRcs1IT extends AbstractRemoteClusterSecurityTestCase { + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .module("x-pack-autoscaling") + .module("x-pack-esql") + .module("x-pack-enrich") + .module("x-pack-ml") + .module("x-pack-ilm") + .module("ingest-common") + .apply(commonClusterConfig) + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.authc.token.enabled", "true") + .rolesFile(Resource.fromClasspath("roles.yml")) + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .module("x-pack-autoscaling") + .module("x-pack-esql") + .module("x-pack-enrich") + .module("x-pack-ml") + .module("x-pack-ilm") + .module("ingest-common") + .apply(commonClusterConfig) + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.authc.token.enabled", "true") + .rolesFile(Resource.fromClasspath("roles.yml")) + .user("logs_foo_all", "x-pack-test-password", "logs_foo_all", false) + .user("logs_foo_16_only", "x-pack-test-password", "logs_foo_16_only", false) + .user("logs_foo_after_2021", "x-pack-test-password", "logs_foo_after_2021", false) + .user("logs_foo_after_2021_pattern", "x-pack-test-password", "logs_foo_after_2021_pattern", false) + .user("logs_foo_after_2021_alias", "x-pack-test-password", "logs_foo_after_2021_alias", false) + .build(); + } + + @ClassRule + public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster); + + public void testDataStreamsWithDlsAndFls() throws Exception { + configureRemoteCluster(REMOTE_CLUSTER_ALIAS, fulfillingCluster, true, randomBoolean(), randomBoolean()); + createDataStreamOnFulfillingCluster(); + setupAdditionalUsersAndRoles(); + + doTestDataStreamsWithFlsAndDls(); + } + + private void setupAdditionalUsersAndRoles() throws IOException { + createUserAndRoleOnQueryCluster("fls_user_logs_pattern", "fls_user_logs_pattern", """ + { + "indices": [ + { + "names": ["logs-*"], + "privileges": ["read"], + "field_security": { + "grant": ["@timestamp", "data_stream.namespace"] + } + } + ] + }"""); + createUserAndRoleOnFulfillingCluster("fls_user_logs_pattern", "fls_user_logs_pattern", """ + { + "indices": [ + { + "names": ["logs-*"], + "privileges": ["read"], + "field_security": { + "grant": ["@timestamp", "data_stream.namespace"] + } + } + ] + }"""); + } + + static void createUserAndRoleOnQueryCluster(String username, String roleName, String 
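+ // This helper and createUserAndRoleOnFulfillingCluster below are intentionally parallel:
+ // the same role and user PUT requests go to the query cluster via adminClient() here and
+ // to the fulfilling cluster via performRequestAgainstFulfillingCluster in the twin method.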
roleJson) throws IOException { + final var putRoleRequest = new Request("PUT", "/_security/role/" + roleName); + putRoleRequest.setJsonEntity(roleJson); + assertOK(adminClient().performRequest(putRoleRequest)); + + final var putUserRequest = new Request("PUT", "/_security/user/" + username); + putUserRequest.setJsonEntity(Strings.format(""" + { + "password": "%s", + "roles" : ["%s"] + }""", PASS, roleName)); + assertOK(adminClient().performRequest(putUserRequest)); + } + + static void createUserAndRoleOnFulfillingCluster(String username, String roleName, String roleJson) throws IOException { + final var putRoleRequest = new Request("PUT", "/_security/role/" + roleName); + putRoleRequest.setJsonEntity(roleJson); + assertOK(performRequestAgainstFulfillingCluster(putRoleRequest)); + + final var putUserRequest = new Request("PUT", "/_security/user/" + username); + putUserRequest.setJsonEntity(Strings.format(""" + { + "password": "%s", + "roles" : ["%s"] + }""", PASS, roleName)); + assertOK(performRequestAgainstFulfillingCluster(putUserRequest)); + } + + static Response runESQLCommandAgainstQueryCluster(String user, String command) throws IOException { + if (command.toLowerCase(Locale.ROOT).contains("limit") == false) { + // add a (high) limit to avoid warnings on default limit + command += " | limit 10000000"; + } + XContentBuilder json = JsonXContent.contentBuilder(); + json.startObject(); + json.field("query", command); + addRandomPragmas(json); + json.endObject(); + Request request = new Request("POST", "_query"); + request.setJsonEntity(org.elasticsearch.common.Strings.toString(json)); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("es-security-runas-user", user)); + request.addParameter("error_trace", "true"); + Response response = adminClient().performRequest(request); + assertOK(response); + return response; + } + + static void addRandomPragmas(XContentBuilder builder) throws IOException { + if (Build.current().isSnapshot()) { + Settings pragmas = randomPragmas(); + if (pragmas != Settings.EMPTY) { + builder.startObject("pragma"); + builder.value(pragmas); + builder.endObject(); + } + } + } + + static Settings randomPragmas() { + Settings.Builder settings = Settings.builder(); + if (randomBoolean()) { + settings.put("page_size", between(1, 5)); + } + if (randomBoolean()) { + settings.put("exchange_buffer_size", between(1, 2)); + } + if (randomBoolean()) { + settings.put("data_partitioning", randomFrom("shard", "segment", "doc")); + } + if (randomBoolean()) { + settings.put("enrich_max_workers", between(1, 5)); + } + if (randomBoolean()) { + settings.put("node_level_reduction", randomBoolean()); + } + return settings.build(); + } + + static void createDataStreamOnFulfillingCluster() throws Exception { + createDataStreamPolicy(AbstractRemoteClusterSecurityTestCase::performRequestAgainstFulfillingCluster); + createDataStreamComponentTemplate(AbstractRemoteClusterSecurityTestCase::performRequestAgainstFulfillingCluster); + createDataStreamIndexTemplate(AbstractRemoteClusterSecurityTestCase::performRequestAgainstFulfillingCluster); + createDataStreamDocuments(AbstractRemoteClusterSecurityTestCase::performRequestAgainstFulfillingCluster); + createDataStreamAlias(AbstractRemoteClusterSecurityTestCase::performRequestAgainstFulfillingCluster); + } + + private static void createDataStreamPolicy(CheckedFunction requestConsumer) throws Exception { + Request request = new Request("PUT", "_ilm/policy/my-lifecycle-policy"); + request.setJsonEntity(""" + { + "policy": { + "phases": { 
+ "hot": { + "actions": { + "rollover": { + "max_primary_shard_size": "50gb" + } + } + }, + "delete": { + "min_age": "735d", + "actions": { + "delete": {} + } + } + } + } + }"""); + + requestConsumer.apply(request); + } + + private static void createDataStreamComponentTemplate(CheckedFunction requestConsumer) throws Exception { + Request request = new Request("PUT", "_component_template/my-template"); + request.setJsonEntity(""" + { + "template": { + "settings": { + "index.lifecycle.name": "my-lifecycle-policy" + }, + "mappings": { + "properties": { + "@timestamp": { + "type": "date", + "format": "date_optional_time||epoch_millis" + }, + "data_stream": { + "properties": { + "namespace": {"type": "keyword"}, + "environment": {"type": "keyword"} + } + } + } + } + } + }"""); + requestConsumer.apply(request); + } + + private static void createDataStreamIndexTemplate(CheckedFunction requestConsumer) throws Exception { + Request request = new Request("PUT", "_index_template/my-index-template"); + request.setJsonEntity(""" + { + "index_patterns": ["logs-*"], + "data_stream": {}, + "composed_of": ["my-template"], + "priority": 500 + }"""); + requestConsumer.apply(request); + } + + private static void createDataStreamDocuments(CheckedFunction requestConsumer) throws Exception { + Request request = new Request("POST", "logs-foo/_bulk"); + request.addParameter("refresh", ""); + request.setJsonEntity(""" + { "create" : {} } + { "@timestamp": "2099-05-06T16:21:15.000Z", "data_stream": {"namespace": "16", "environment": "dev"} } + { "create" : {} } + { "@timestamp": "2001-05-06T16:21:15.000Z", "data_stream": {"namespace": "17", "environment": "prod"} } + """); + assertMap(entityAsMap(requestConsumer.apply(request)), matchesMap().extraOk().entry("errors", false)); + } + + private static void createDataStreamAlias(CheckedFunction requestConsumer) throws Exception { + Request request = new Request("PUT", "_alias"); + request.setJsonEntity(""" + { + "actions": [ + { + "add": { + "index": "logs-foo", + "alias": "alias-foo" + } + } + ] + }"""); + assertMap(entityAsMap(requestConsumer.apply(request)), matchesMap().extraOk().entry("errors", false)); + } + + static void doTestDataStreamsWithFlsAndDls() throws IOException { + // DLS + MapMatcher twoResults = matchesMap().extraOk().entry("values", matchesList().item(matchesList().item(2))); + MapMatcher oneResult = matchesMap().extraOk().entry("values", matchesList().item(matchesList().item(1))); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_all", "FROM my_remote_cluster:logs-foo | STATS COUNT(*)")), + twoResults + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_16_only", "FROM my_remote_cluster:logs-foo | STATS COUNT(*)")), + oneResult + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_after_2021", "FROM my_remote_cluster:logs-foo | STATS COUNT(*)")), + oneResult + ); + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster("logs_foo_after_2021_pattern", "FROM my_remote_cluster:logs-foo | STATS COUNT(*)") + ), + oneResult + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_all", "FROM my_remote_cluster:logs-* | STATS COUNT(*)")), + twoResults + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_16_only", "FROM my_remote_cluster:logs-* | STATS COUNT(*)")), + oneResult + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_after_2021", "FROM my_remote_cluster:logs-* | STATS COUNT(*)")), + oneResult + ); + assertMap( 
+ entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_after_2021_pattern", "FROM my_remote_cluster:logs-* | STATS COUNT(*)")), + oneResult + ); + + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster("logs_foo_after_2021_alias", "FROM my_remote_cluster:alias-foo | STATS COUNT(*)") + ), + oneResult + ); + assertMap( + entityAsMap(runESQLCommandAgainstQueryCluster("logs_foo_after_2021_alias", "FROM my_remote_cluster:alias-* | STATS COUNT(*)")), + oneResult + ); + + // FLS + // logs_foo_all does not have FLS restrictions so should be able to access all fields + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster("logs_foo_all", "FROM my_remote_cluster:logs-foo | SORT data_stream.namespace | LIMIT 1") + ), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "@timestamp").entry("type", "date"), + matchesMap().entry("name", "data_stream.environment").entry("type", "keyword"), + matchesMap().entry("name", "data_stream.namespace").entry("type", "keyword") + ) + ) + ); + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster("logs_foo_all", "FROM my_remote_cluster:logs-* | SORT data_stream.namespace | LIMIT 1") + ), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "@timestamp").entry("type", "date"), + matchesMap().entry("name", "data_stream.environment").entry("type", "keyword"), + matchesMap().entry("name", "data_stream.namespace").entry("type", "keyword") + ) + ) + ); + + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster( + "fls_user_logs_pattern", + "FROM my_remote_cluster:logs-foo | SORT data_stream.namespace | LIMIT 1" + ) + ), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "@timestamp").entry("type", "date"), + matchesMap().entry("name", "data_stream.namespace").entry("type", "keyword") + ) + ) + ); + assertMap( + entityAsMap( + runESQLCommandAgainstQueryCluster( + "fls_user_logs_pattern", + "FROM my_remote_cluster:logs-* | SORT data_stream.namespace | LIMIT 1" + ) + ), + matchesMap().extraOk() + .entry( + "columns", + List.of( + matchesMap().entry("name", "@timestamp").entry("type", "date"), + matchesMap().entry("name", "data_stream.namespace").entry("type", "keyword") + ) + ) + ); + } +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs2IT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs2IT.java new file mode 100644 index 000000000000..c5cf70417702 --- /dev/null +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityDataStreamEsqlRcs2IT.java @@ -0,0 +1,126 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.remotecluster; + +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.junit.RunnableTestRuleAdapter; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityDataStreamEsqlRcs1IT.createDataStreamOnFulfillingCluster; +import static org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityDataStreamEsqlRcs1IT.createUserAndRoleOnQueryCluster; +import static org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityDataStreamEsqlRcs1IT.doTestDataStreamsWithFlsAndDls; + +// TODO consolidate me with RemoteClusterSecurityDataStreamEsqlRcs1IT +public class RemoteClusterSecurityDataStreamEsqlRcs2IT extends AbstractRemoteClusterSecurityTestCase { + private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>(); + private static final AtomicBoolean SSL_ENABLED_REF = new AtomicBoolean(); + private static final AtomicBoolean NODE1_RCS_SERVER_ENABLED = new AtomicBoolean(); + private static final AtomicBoolean NODE2_RCS_SERVER_ENABLED = new AtomicBoolean(); + + static { + fulfillingCluster = ElasticsearchCluster.local() + .name("fulfilling-cluster") + .nodes(3) + .module("x-pack-autoscaling") + .module("x-pack-esql") + .module("x-pack-enrich") + .module("x-pack-ml") + .module("x-pack-ilm") + .module("ingest-common") + .apply(commonClusterConfig) + .setting("remote_cluster.port", "0") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_server.ssl.enabled", () -> String.valueOf(SSL_ENABLED_REF.get())) + .setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key") + .setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password") + .node(0, spec -> spec.setting("remote_cluster_server.enabled", "true")) + .node(1, spec -> spec.setting("remote_cluster_server.enabled", () -> String.valueOf(NODE1_RCS_SERVER_ENABLED.get()))) + .node(2, spec -> spec.setting("remote_cluster_server.enabled", () -> String.valueOf(NODE2_RCS_SERVER_ENABLED.get()))) + .build(); + + queryCluster = ElasticsearchCluster.local() + .name("query-cluster") + .module("x-pack-autoscaling") + .module("x-pack-esql") + .module("x-pack-enrich") + .module("x-pack-ml") + .module("x-pack-ilm") + .module("ingest-common") + .apply(commonClusterConfig) + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.remote_cluster_client.ssl.enabled", () -> String.valueOf(SSL_ENABLED_REF.get())) + .setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt") + .setting("xpack.security.authc.token.enabled", "true") + .keystore("cluster.remote.my_remote_cluster.credentials", () -> { + if (API_KEY_MAP_REF.get() == null) { + final Map<String, Object> apiKeyMap = createCrossClusterAccessApiKey(""" + { + "search": [ + { + "names": ["logs-*", "alias-*"] + } + ] + }"""); + API_KEY_MAP_REF.set(apiKeyMap); + } + return (String) API_KEY_MAP_REF.get().get("encoded"); + }) + .rolesFile(Resource.fromClasspath("roles.yml")) + .user("logs_foo_all", "x-pack-test-password", "logs_foo_all",
false) + .user("logs_foo_16_only", "x-pack-test-password", "logs_foo_16_only", false) + .user("logs_foo_after_2021", "x-pack-test-password", "logs_foo_after_2021", false) + .user("logs_foo_after_2021_pattern", "x-pack-test-password", "logs_foo_after_2021_pattern", false) + .user("logs_foo_after_2021_alias", "x-pack-test-password", "logs_foo_after_2021_alias", false) + .build(); + } + + @ClassRule + // Use a RuleChain to ensure that the fulfilling cluster is started before the query cluster + // `SSL_ENABLED_REF` is used to control the SSL-enabled setting on the test clusters + // We set it here, since randomization methods are not available in the static initializer context above + public static TestRule clusterRule = RuleChain.outerRule(new RunnableTestRuleAdapter(() -> { + SSL_ENABLED_REF.set(usually()); + NODE1_RCS_SERVER_ENABLED.set(randomBoolean()); + NODE2_RCS_SERVER_ENABLED.set(randomBoolean()); + })).around(fulfillingCluster).around(queryCluster); + + public void testDataStreamsWithDlsAndFls() throws Exception { + configureRemoteCluster(); + createDataStreamOnFulfillingCluster(); + setupAdditionalUsersAndRoles(); + + doTestDataStreamsWithFlsAndDls(); + } + + private void setupAdditionalUsersAndRoles() throws IOException { + createUserAndRoleOnQueryCluster("fls_user_logs_pattern", "fls_user_logs_pattern", """ + { + "indices": [{"names": [""], "privileges": ["read"]}], + "remote_indices": [ + { + "names": ["logs-*"], + "privileges": ["read"], + "field_security": { + "grant": ["@timestamp", "data_stream.namespace"] + }, + "clusters": ["*"] + } + ] + }"""); + } +} diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java index 09449f81121f..d6bad85161fd 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityEsqlIT.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.remotecluster; +import org.apache.http.client.methods.HttpGet; import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -22,6 +23,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.util.resource.Resource; import org.elasticsearch.test.junit.RunnableTestRuleAdapter; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; @@ -34,6 +36,7 @@ import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Base64; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; @@ -51,6 +54,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.not; public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTestCase { private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>(); @@ -342,6 +346,14 @@ public class 
RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe configureRemoteCluster(); populateData(); + Map<String, Object> esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client()); + + Object ccsLastUsedTimestampAtStartOfTest = null; + if (esqlCcsLicenseFeatureUsage.isEmpty() == false) { + // some test runs will have a usage value already, so capture that to compare at the end of the test + ccsLastUsedTimestampAtStartOfTest = esqlCcsLicenseFeatureUsage.get("last_used"); + } + // query remote cluster only Request request = esqlRequest(""" FROM my_remote_cluster:employees @@ -385,6 +397,15 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe | LIMIT 2 | KEEP emp_id, department""")); assertRemoteOnlyAgainst2IndexResults(response); + + // check that the esql-ccs license feature is now present and that the last_used field has been updated + esqlCcsLicenseFeatureUsage = fetchEsqlCcsFeatureUsageFromNode(client()); + assertThat(esqlCcsLicenseFeatureUsage.size(), equalTo(5)); + Object lastUsed = esqlCcsLicenseFeatureUsage.get("last_used"); + assertNotNull("lastUsed should not be null", lastUsed); + if (ccsLastUsedTimestampAtStartOfTest != null) { + assertThat(lastUsed.toString(), not(equalTo(ccsLastUsedTimestampAtStartOfTest.toString()))); + } } @SuppressWarnings("unchecked") @@ -1660,4 +1681,18 @@ public class RemoteClusterSecurityEsqlIT extends AbstractRemoteClusterSecurityTe assertThat((int) shards.get("failed"), is(0)); } } + + private static Map<String, Object> fetchEsqlCcsFeatureUsageFromNode(RestClient client) throws IOException { + Request request = new Request(HttpGet.METHOD_NAME, "_license/feature_usage"); + request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue(USER, PASS))); + Response response = client.performRequest(request); + ObjectPath path = ObjectPath.createFromResponse(response); + List<Map<String, Object>> features = path.evaluate("features"); + for (var feature : features) { + if ("esql-ccs".equals(feature.get("name"))) { + return feature; + } + } + return Collections.emptyMap(); + } } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/resources/roles.yml b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/resources/roles.yml index b61daa068ed1..c09f9dc620a4 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/resources/roles.yml @@ -41,3 +41,102 @@ ccr_user_role: manage_role: cluster: [ 'manage' ] + +logs_foo_all: + cluster: [] + indices: + - names: [ 'logs-foo' ] + privileges: [ 'read' ] + remote_indices: + - names: [ 'logs-foo' ] + clusters: [ '*' ] + privileges: [ 'read' ] + +logs_foo_16_only: + cluster: [] + indices: + - names: [ 'logs-foo' ] + privileges: [ 'read' ] + query: | + { + "term": { + "data_stream.namespace": "16" + } + } + remote_indices: + - names: [ 'logs-foo' ] + clusters: [ '*' ] + privileges: [ 'read' ] + query: | + { + "term": { + "data_stream.namespace": "16" + } + } + +logs_foo_after_2021: + cluster: [] + indices: + - names: [ 'logs-foo' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": "2021-01-01T00:00:00"} + } + } + remote_indices: + - names: [ 'logs-foo' ] + clusters: [ '*' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": "2021-01-01T00:00:00"} + } + } + +logs_foo_after_2021_pattern: + cluster: [] + indices: + - names: [ 'logs-*' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": 
"2021-01-01T00:00:00"} + } + } + remote_indices: + - names: [ 'logs-*' ] + clusters: [ '*' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": "2021-01-01T00:00:00"} + } + } + +logs_foo_after_2021_alias: + cluster: [] + indices: + - names: [ 'alias-foo' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": "2021-01-01T00:00:00"} + } + } + remote_indices: + - names: [ 'alias-foo' ] + clusters: [ '*' ] + privileges: [ 'read' ] + query: | + { + "range": { + "@timestamp": {"gte": "2021-01-01T00:00:00"} + } + } + diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index db87fdbcb8f1..b139d1526ec2 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -639,6 +639,7 @@ public class Constants { "internal:index/metadata/migration_version/update", new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/migration/reindex_status" : null, new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex" : null, + new FeatureFlag("reindex_data_stream").isEnabled() ? "indices:admin/data_stream/reindex_cancel" : null, "internal:admin/repository/verify", "internal:admin/repository/verify/coordinate" ).filter(Objects::nonNull).collect(Collectors.toUnmodifiableSet()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index 7d99d5817bdc..bc730b5695c1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -14,13 +14,16 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRespon import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import java.util.Map; +import java.util.Random; import static java.util.Collections.singletonMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; @@ -60,6 +63,14 @@ public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { return super.configUsersRoles() + "my_kibana_user:kibana_user\n" + "kibana_user:kibana_user"; } + @Override + protected Settings.Builder setRandomIndexSettings(Random random, Settings.Builder builder) { + // Prevent INDEX_CHECK_ON_STARTUP as a random setting since it could result in indices being checked for corruption before opening. 
+ // When corruption is detected, it will prevent the shard from being opened. This check is expensive in terms of CPU and memory + // usage and causes intermittent CI failures due to timeout. + return super.setRandomIndexSettings(random, builder).put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), false); + } + public void testFieldMappings() throws Exception { final String index = "logstash-20-12-2015"; final String field = "foo"; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml index f50a7a65f53d..9fb33b43f042 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/10_reindex.yml @@ -104,14 +104,23 @@ setup: name: my-data-stream - is_true: acknowledged -# Uncomment once the cancel API is in place -# - do: -# migrate.reindex: -# body: | -# { -# "mode": "upgrade", -# "source": { -# "index": "my-data-stream" -# } -# } -# - match: { task: "reindex-data-stream-my-data-stream" } + - do: + migrate.reindex: + body: | + { + "mode": "upgrade", + "source": { + "index": "my-data-stream" + } + } + - match: { acknowledged: true } + + - do: + migrate.cancel_reindex: + index: "my-data-stream" + - match: { acknowledged: true } + + - do: + catch: /resource_not_found_exception/ + migrate.cancel_reindex: + index: "my-data-stream" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml index ae343a0b4db9..c94ce8dd211a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/migrate/20_reindex_status.yml @@ -46,25 +46,39 @@ setup: name: my-data-stream - is_true: acknowledged -# Uncomment once the cancel API is in place -# - do: -# migrate.reindex: -# body: | -# { -# "mode": "upgrade", -# "source": { -# "index": "my-data-stream" -# } -# } -# - match: { acknowledged: true } -# -# - do: -# migrate.get_reindex_status: -# index: "my-data-stream" -# - match: { complete: true } -# - match: { total_indices: 1 } -# - match: { total_indices_requiring_upgrade: 0 } -# - match: { successes: 0 } -# - match: { in_progress: 0 } -# - match: { pending: 0 } -# - match: { errors: [] } + - do: + migrate.reindex: + body: | + { + "mode": "upgrade", + "source": { + "index": "my-data-stream" + } + } + - match: { acknowledged: true } + + - do: + migrate.get_reindex_status: + index: "my-data-stream" + - match: { complete: true } + - match: { total_indices: 1 } + - match: { total_indices_requiring_upgrade: 0 } + - match: { successes: 0 } + - match: { in_progress: 0 } + - match: { pending: 0 } + - match: { errors: [] } + + - do: + migrate.cancel_reindex: + index: "my-data-stream" + - match: { acknowledged: true } + + - do: + catch: /resource_not_found_exception/ + migrate.cancel_reindex: + index: "my-data-stream" + + - do: + catch: /resource_not_found_exception/ + migrate.get_reindex_status: + index: "my-data-stream" diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java index 337d3c5820c0..24586e5f3633 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandler.java @@ -169,7 +169,14 @@ class TransformFailureHandler { * @param numFailureRetries the number of configured retries */ private void handleBulkIndexingException(BulkIndexingException bulkIndexingException, boolean unattended, int numFailureRetries) { - if (unattended == false && bulkIndexingException.isIrrecoverable()) { + if (bulkIndexingException.getCause() instanceof ClusterBlockException) { + retryWithoutIncrementingFailureCount( + bulkIndexingException, + bulkIndexingException.getDetailedMessage(), + unattended, + numFailureRetries + ); + } else if (unattended == false && bulkIndexingException.isIrrecoverable()) { String message = TransformMessages.getMessage( TransformMessages.LOG_TRANSFORM_PIVOT_IRRECOVERABLE_BULK_INDEXING_ERROR, bulkIndexingException.getDetailedMessage() @@ -232,12 +239,46 @@ class TransformFailureHandler { && unwrappedException.getClass().equals(context.getLastFailure().getClass()); final int failureCount = context.incrementAndGetFailureCount(unwrappedException); - if (unattended == false && numFailureRetries != -1 && failureCount > numFailureRetries) { fail(unwrappedException, "task encountered more than " + numFailureRetries + " failures; latest failure: " + message); return; } + logRetry(unwrappedException, message, unattended, numFailureRetries, failureCount, repeatedFailure); + } + + /** + * Terminate failure handling without incrementing the retries used + *
    + * This is used when there is an ongoing recoverable issue and we want to retain + * retries for any issues that may occur after the issue is resolved + * + * @param unwrappedException The exception caught + * @param message error message to log/audit + * @param unattended whether the transform runs in unattended mode + * @param numFailureRetries the number of configured retries + */ + private void retryWithoutIncrementingFailureCount( + Throwable unwrappedException, + String message, + boolean unattended, + int numFailureRetries + ) { + // group failures to decide whether to report it below + final boolean repeatedFailure = context.getLastFailure() != null + && unwrappedException.getClass().equals(context.getLastFailure().getClass()); + + logRetry(unwrappedException, message, unattended, numFailureRetries, context.getFailureCount(), repeatedFailure); + } + + private void logRetry( + Throwable unwrappedException, + String message, + boolean unattended, + int numFailureRetries, + int failureCount, + boolean repeatedFailure + ) { // Since our schedule fires again very quickly after failures it is possible to run into the same failure numerous // times in a row, very quickly. We do not want to spam the audit log with repeated failures, so only record the first one // and if the number of retries is about to exceed diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java index 84c8d4e14040..3894ff3043cc 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformFailureHandlerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; import org.elasticsearch.xpack.transform.notifications.MockTransformAuditor; +import java.util.List; import java.util.Map; import java.util.Set; @@ -63,9 +64,121 @@ public class TransformFailureHandlerTests extends ESTestCase { } } - public void testUnattended() { + public void testHandleIndexerFailure_CircuitBreakingExceptionNewPageSizeLessThanMinimumPageSize() { + var e = new CircuitBreakingException(randomAlphaOfLength(10), 1, 0, randomFrom(CircuitBreaker.Durability.values())); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_CircuitBreakingExceptionNewPageSizeNotLessThanMinimumPageSize() { + var e = new CircuitBreakingException(randomAlphaOfLength(10), 1, 1, randomFrom(CircuitBreaker.Durability.values())); + + List.of(true, false).forEach((unattended) -> { assertNoFailureAndContextPageSizeSet(e, unattended, 365); }); + } + + public void testHandleIndexerFailure_ScriptException() { + var e = new ScriptException( + randomAlphaOfLength(10), + new ArithmeticException(randomAlphaOfLength(10)), + singletonList(randomAlphaOfLength(10)), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_BulkIndexExceptionWrappingClusterBlockException() { + final BulkIndexingException bulkIndexingException = new BulkIndexingException( + randomAlphaOfLength(10), + new ClusterBlockException(Map.of("test-index", Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK))), + 
randomBoolean() + ); + + List.of(true, false).forEach((unattended) -> { assertRetryFailureCountNotIncremented(bulkIndexingException, unattended); }); + } + + public void testHandleIndexerFailure_IrrecoverableBulkIndexException() { + final BulkIndexingException e = new BulkIndexingException( + randomAlphaOfLength(10), + new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), + true + ); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_RecoverableBulkIndexException() { + final BulkIndexingException bulkIndexingException = new BulkIndexingException( + randomAlphaOfLength(10), + new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), + false + ); + + List.of(true, false).forEach((unattended) -> { assertRetry(bulkIndexingException, unattended); }); + } + + public void testHandleIndexerFailure_ClusterBlockException() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new ClusterBlockException(Map.of(randomAlphaOfLength(10), Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK))), + unattended + ); + }); + } + + public void testHandleIndexerFailure_SearchPhaseExecutionExceptionWithNoShardSearchFailures() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new SearchPhaseExecutionException(randomAlphaOfLength(10), randomAlphaOfLength(10), ShardSearchFailure.EMPTY_ARRAY), + unattended + ); + }); + } + + public void testHandleIndexerFailure_SearchPhaseExecutionExceptionWithShardSearchFailures() { + List.of(true, false).forEach((unattended) -> { + assertRetry( + new SearchPhaseExecutionException( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + new ShardSearchFailure[] { new ShardSearchFailure(new Exception()) } + ), + unattended + ); + }); + } + + public void testHandleIndexerFailure_RecoverableElasticsearchException() { + List.of(true, false).forEach((unattended) -> { + assertRetry(new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.INTERNAL_SERVER_ERROR), unattended); + }); + } + + public void testHandleIndexerFailure_IrrecoverableElasticsearchException() { + var e = new ElasticsearchStatusException(randomAlphaOfLength(10), RestStatus.NOT_FOUND); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_IllegalArgumentException() { + var e = new IllegalArgumentException(randomAlphaOfLength(10)); + assertRetryIfUnattendedOtherwiseFail(e); + } + + public void testHandleIndexerFailure_UnexpectedException() { + List.of(true, false).forEach((unattended) -> { assertRetry(new Exception(), unattended); }); + } + + private void assertRetryIfUnattendedOtherwiseFail(Exception e) { + List.of(true, false).forEach((unattended) -> { + if (unattended) { + assertRetry(e, unattended); + } else { + assertFailure(e); + } + }); + } + + private void assertRetry(Exception e, boolean unattended) { String transformId = randomAlphaOfLength(10); - SettingsConfig settings = new SettingsConfig.Builder().setUnattended(true).build(); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); MockTransformContextListener contextListener = new MockTransformContextListener(); @@ -74,51 +187,33 @@ public class TransformFailureHandlerTests extends ESTestCase { TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); - handler.handleIndexerFailure( - new 
SearchPhaseExecutionException( - "query", - "Partial shards failure", - new ShardSearchFailure[] { - new ShardSearchFailure(new CircuitBreakingException("to much memory", 110, 100, CircuitBreaker.Durability.TRANSIENT)) } - ), - settings - ); - - // CBE isn't a failure, but it only affects page size(which we don't test here) - assertFalse(contextListener.getFailed()); - assertEquals(0, contextListener.getFailureCountChangedCounter()); - - assertNoFailure( - handler, - new SearchPhaseExecutionException( - "query", - "Partial shards failure", - new ShardSearchFailure[] { - new ShardSearchFailure( - new ScriptException( - "runtime error", - new ArithmeticException("/ by zero"), - singletonList("stack"), - "test", - "painless" - ) - ) } - ), - contextListener, - settings - ); - assertNoFailure( - handler, - new ElasticsearchStatusException("something really bad happened", RestStatus.INTERNAL_SERVER_ERROR), - contextListener, - settings - ); - assertNoFailure(handler, new IllegalArgumentException("expected apples not oranges"), contextListener, settings); - assertNoFailure(handler, new RuntimeException("the s*** hit the fan"), contextListener, settings); - assertNoFailure(handler, new NullPointerException("NPE"), contextListener, settings); + assertNoFailure(handler, e, contextListener, settings, true); + assertNoFailure(handler, e, contextListener, settings, true); + if (unattended) { + assertNoFailure(handler, e, contextListener, settings, true); + } else { + // fail after max retry attempts reached + assertFailure(handler, e, contextListener, settings, true); + } } - public void testClusterBlock() { + private void assertRetryFailureCountNotIncremented(Exception e, boolean unattended) { + String transformId = randomAlphaOfLength(10); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); + + MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); + MockTransformContextListener contextListener = new MockTransformContextListener(); + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); + context.setPageSize(500); + + TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); + + assertNoFailure(handler, e, contextListener, settings, false); + assertNoFailure(handler, e, contextListener, settings, false); + assertNoFailure(handler, e, contextListener, settings, false); + } + + private void assertFailure(Exception e) { String transformId = randomAlphaOfLength(10); SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).build(); @@ -129,32 +224,50 @@ public class TransformFailureHandlerTests extends ESTestCase { TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); - final ClusterBlockException clusterBlock = new ClusterBlockException( - Map.of("test-index", Set.of(MetadataIndexStateService.INDEX_CLOSED_BLOCK)) - ); - - handler.handleIndexerFailure(clusterBlock, settings); - assertFalse(contextListener.getFailed()); - assertEquals(1, contextListener.getFailureCountChangedCounter()); - - handler.handleIndexerFailure(clusterBlock, settings); - assertFalse(contextListener.getFailed()); - assertEquals(2, contextListener.getFailureCountChangedCounter()); - - handler.handleIndexerFailure(clusterBlock, settings); - assertTrue(contextListener.getFailed()); - assertEquals(3, contextListener.getFailureCountChangedCounter()); + assertFailure(handler, e, contextListener, settings, false); } 
private void assertNoFailure( TransformFailureHandler handler, Exception e, MockTransformContextListener mockTransformContextListener, - SettingsConfig settings + SettingsConfig settings, + boolean failureCountIncremented ) { handler.handleIndexerFailure(e, settings); assertFalse(mockTransformContextListener.getFailed()); - assertEquals(1, mockTransformContextListener.getFailureCountChangedCounter()); + assertEquals(failureCountIncremented ? 1 : 0, mockTransformContextListener.getFailureCountChangedCounter()); + mockTransformContextListener.reset(); + } + + private void assertNoFailureAndContextPageSizeSet(Exception e, boolean unattended, int newPageSize) { + String transformId = randomAlphaOfLength(10); + SettingsConfig settings = new SettingsConfig.Builder().setNumFailureRetries(2).setUnattended(unattended).build(); + + MockTransformAuditor auditor = MockTransformAuditor.createMockAuditor(); + MockTransformContextListener contextListener = new MockTransformContextListener(); + TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); + context.setPageSize(500); + + TransformFailureHandler handler = new TransformFailureHandler(auditor, context, transformId); + + handler.handleIndexerFailure(e, settings); + assertFalse(contextListener.getFailed()); + assertEquals(0, contextListener.getFailureCountChangedCounter()); + assertEquals(newPageSize, context.getPageSize()); + contextListener.reset(); + } + + private void assertFailure( + TransformFailureHandler handler, + Exception e, + MockTransformContextListener mockTransformContextListener, + SettingsConfig settings, + boolean failureCountChanged + ) { + handler.handleIndexerFailure(e, settings); + assertTrue(mockTransformContextListener.getFailed()); + assertEquals(failureCountChanged ? 
1 : 0, mockTransformContextListener.getFailureCountChangedCounter()); mockTransformContextListener.reset(); } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml index 4c0bbfd7ec13..1b435c551fbe 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/src/test/resources/rest-api-spec/test/querying_cluster/80_esql.yml @@ -86,11 +86,12 @@ teardown: ignore: 404 --- -"Index data and search on the mixed cluster": +"ES|QL cross-cluster query fails with basic license": - skip: features: allowed_warnings - do: + catch: bad_request allowed_warnings: - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } @@ -98,23 +99,11 @@ teardown: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT tag | LIMIT 10' - - match: {columns.0.name: "total"} - - match: {columns.0.type: "long"} - - match: {columns.1.name: "tag"} - - match: {columns.1.type: "keyword"} - - - match: {values.0.0: 2200} - - match: {values.0.1: "computer"} - - match: {values.1.0: 170} - - match: {values.1.1: "headphone"} - - match: {values.2.0: 2100 } - - match: {values.2.1: "laptop" } - - match: {values.3.0: 1000 } - - match: {values.3.1: "monitor" } - - match: {values.4.0: 550 } - - match: {values.4.1: "tablet" } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: active basic license" } - do: + catch: bad_request allowed_warnings: - "Line 1:21: Square brackets '[]' need to be removed in FROM METADATA declaration" headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } @@ -128,28 +117,11 @@ teardown: lte: "2023-01-03" format: "yyyy-MM-dd" - - match: {columns.0.name: "_index"} - - match: {columns.0.type: "keyword"} - - match: {columns.1.name: "tag"} - - match: {columns.1.type: "keyword"} - - match: {columns.2.name: "cost" } - - match: {columns.2.type: "long" } - - - match: {values.0.0: "esql_local"} - - match: {values.0.1: "monitor"} - - match: {values.0.2: 250 } - - match: {values.1.0: "my_remote_cluster:esql_index" } - - match: {values.1.1: "tablet"} - - match: {values.1.2: 450 } - - match: {values.2.0: "my_remote_cluster:esql_index" } - - match: {values.2.1: "computer" } - - match: {values.2.2: 1200 } - - match: {values.3.0: "esql_local"} - - match: {values.3.1: "laptop" } - - match: {values.3.2: 2100 } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. 
License found: active basic license" } --- -"Enrich across clusters": +"ES|QL enrich query across clusters fails with basic license": - requires: cluster_features: ["gte_v8.13.0"] reason: "Enrich across clusters available in 8.13 or later" @@ -194,27 +166,14 @@ teardown: index: suggestions - do: + catch: bad_request headers: { Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" } esql.query: body: query: 'FROM *:esql*,esql_* | STATS total = sum(cost) by tag | SORT total DESC | LIMIT 3 | ENRICH suggestions | KEEP tag, total, phrase' - - match: {columns.0.name: "tag"} - - match: {columns.0.type: "keyword"} - - match: {columns.1.name: "total" } - - match: {columns.1.type: "long" } - - match: {columns.2.name: "phrase" } - - match: {columns.2.type: "keyword" } - - - match: {values.0.0: "computer"} - - match: {values.0.1: 2200} - - match: {values.0.2: "best desktop for programming"} - - match: {values.1.0: "laptop"} - - match: {values.1.1: 2100 } - - match: {values.1.2: "the best battery life laptop"} - - match: {values.2.0: "monitor" } - - match: {values.2.1: 1000 } - - match: {values.2.2: "4k or 5k or 6K monitor?" } + - match: { error.type: "status_exception" } + - match: { error.reason: "A valid Enterprise license is required to run ES|QL cross-cluster searches. License found: active basic license" } - do: enrich.delete_policy: diff --git a/x-pack/test/smb-fixture/build.gradle b/x-pack/test/smb-fixture/build.gradle index aeb5626ce950..a982259edb2d 100644 --- a/x-pack/test/smb-fixture/build.gradle +++ b/x-pack/test/smb-fixture/build.gradle @@ -2,6 +2,8 @@ apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.cache-test-fixtures' dependencies { + implementation "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" + api project(':test:fixtures:testcontainer-utils') api "junit:junit:${versions.junit}" api "org.testcontainers:testcontainers:${versions.testcontainer}" diff --git a/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java b/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java index 10f589e4e1df..27d8257f4be1 100644 --- a/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java +++ b/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java @@ -7,12 +7,18 @@ package org.elasticsearch.test.fixtures.smb; +import com.github.dockerjava.api.model.Capability; + import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.containers.wait.strategy.WaitAllStrategy; import org.testcontainers.images.builder.ImageFromDockerfile; +import java.time.Duration; + public final class SmbTestContainer extends DockerEnvironmentAwareTestContainer { - private static final String DOCKER_BASE_IMAGE = "ubuntu:16.04"; + private static final String DOCKER_BASE_IMAGE = "ubuntu:24.04"; public static final int AD_LDAP_PORT = 636; public static final int AD_LDAP_GC_PORT = 3269; @@ -20,15 +26,15 @@ public final class SmbTestContainer extends DockerEnvironmentAwareTestContainer super( new ImageFromDockerfile("es-smb-fixture").withDockerfileFromBuilder( builder -> builder.from(DOCKER_BASE_IMAGE) - .run("apt-get update -qqy && apt-get install -qqy samba ldap-utils") + .env("TZ", "Etc/UTC") + .run("DEBIAN_FRONTEND=noninteractive apt-get update -qqy && apt-get install -qqy tzdata winbind samba ldap-utils") 
.copy("fixture/provision/installsmb.sh", "/fixture/provision/installsmb.sh") .copy("fixture/certs/ca.key", "/fixture/certs/ca.key") .copy("fixture/certs/ca.pem", "/fixture/certs/ca.pem") .copy("fixture/certs/cert.pem", "/fixture/certs/cert.pem") .copy("fixture/certs/key.pem", "/fixture/certs/key.pem") .run("chmod +x /fixture/provision/installsmb.sh") - .run("/fixture/provision/installsmb.sh") - .cmd("service samba-ad-dc restart && sleep infinity") + .cmd("/fixture/provision/installsmb.sh && service samba-ad-dc restart && echo Samba started && sleep infinity") .build() ) .withFileFromClasspath("fixture/provision/installsmb.sh", "/smb/provision/installsmb.sh") @@ -37,10 +43,20 @@ public final class SmbTestContainer extends DockerEnvironmentAwareTestContainer .withFileFromClasspath("fixture/certs/cert.pem", "/smb/certs/cert.pem") .withFileFromClasspath("fixture/certs/key.pem", "/smb/certs/key.pem") ); - // addExposedPort(389); - // addExposedPort(3268); + addExposedPort(AD_LDAP_PORT); addExposedPort(AD_LDAP_GC_PORT); + + setWaitStrategy( + new WaitAllStrategy().withStartupTimeout(Duration.ofSeconds(120)) + .withStrategy(Wait.forLogMessage(".*Samba started.*", 1)) + .withStrategy(Wait.forListeningPort()) + ); + + getCreateContainerCmdModifiers().add(createContainerCmd -> { + createContainerCmd.getHostConfig().withCapAdd(Capability.SYS_ADMIN); + return createContainerCmd; + }); } public String getAdLdapUrl() { diff --git a/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh b/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh old mode 100644 new mode 100755 index 463238b9f50c..fe939431bb43 --- a/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh +++ b/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh @@ -21,7 +21,7 @@ cat $SSL_DIR/ca.pem >> /etc/ssl/certs/ca-certificates.crt mv /etc/samba/smb.conf /etc/samba/smb.conf.orig -samba-tool domain provision --server-role=dc --use-rfc2307 --dns-backend=SAMBA_INTERNAL --realm=AD.TEST.ELASTICSEARCH.COM --domain=ADES --adminpass=Passw0rd --use-ntvfs +samba-tool domain provision --server-role=dc --use-rfc2307 --dns-backend=SAMBA_INTERNAL --realm=AD.TEST.ELASTICSEARCH.COM --domain=ADES --adminpass=Passw0rd cp /var/lib/samba/private/krb5.conf /etc/krb5.conf