From 90ab2558b03f59c38b8223c2b881f97da7cd4394 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Tue, 11 Jun 2024 08:07:40 -0400
Subject: [PATCH 01/34] Adjusting bwc version after backport of #109423
(#109469)
Co-authored-by: Elastic Machine
---
.../170_knn_search_hex_encoded_byte_vectors.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml
index be1e619d046a..c4d8b1f0929b 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/170_knn_search_hex_encoded_byte_vectors.yml
@@ -164,8 +164,8 @@ setup:
---
"Dynamic dimensions for hex-encoded string":
- requires:
- cluster_features: "gte_v8.15.0"
- reason: 'hex encoding for byte vectors fixed in 8.15'
+ cluster_features: "gte_v8.14.1"
+ reason: 'hex encoding for byte vectors fixed in 8.14.1'
- do:
indices.create:
From b97d5cfdb746605f722dead3aff3e7a122590ded Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Tue, 11 Jun 2024 08:15:37 -0400
Subject: [PATCH 02/34] Test mute for DenseVectorMappingUpdateIT #109571
(#109572)
---
muted-tests.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/muted-tests.yml b/muted-tests.yml
index d82c823f664b..eeafea033fc8 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -62,6 +62,8 @@ tests:
- class: "org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT"
issue: "https://github.com/elastic/elasticsearch/issues/109478"
method: "test {yaml=reference/esql/processing-commands/lookup/line_31}"
+- class: DenseVectorMappingUpdateIT
+ issue: "https://github.com/elastic/elasticsearch/issues/109571"
# Examples:
#
From d6fb5cfbe6b8a519c18b0d4d952a8fee0c65e8c0 Mon Sep 17 00:00:00 2001
From: Liam Thompson <32779855+leemthompo@users.noreply.github.com>
Date: Tue, 11 Jun 2024 14:59:40 +0200
Subject: [PATCH 03/34] [DOCS] Expand context about `xpack.security.enabled`
setting (#109575)
---
docs/reference/settings/security-settings.asciidoc | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc
index e0d01965479c..7dd9d0574638 100644
--- a/docs/reference/settings/security-settings.asciidoc
+++ b/docs/reference/settings/security-settings.asciidoc
@@ -23,7 +23,9 @@ For more information about creating and updating the {es} keystore, see
==== General security settings
`xpack.security.enabled`::
(<>)
-Defaults to `true`, which enables {es} {security-features} on the node. +
+Defaults to `true`, which enables {es} {security-features} on the node.
+This setting must be enabled to use Elasticsearch's authentication,
+authorization and audit features. +
+
--
If set to `false`, {security-features} are disabled, which is not recommended.
From 71d2faa631f183511b0612cc004c08df0f695d32 Mon Sep 17 00:00:00 2001
From: Przemyslaw Gomulka
Date: Tue, 11 Jun 2024 15:05:02 +0200
Subject: [PATCH 04/34] Remove :qa:apm module (#109565)
The test was disabled due to a problem with the availability of Docker images after the release.
It was reimplemented in :test:external-modules:apm-integration as TracesApmIT.
closes #90308
---
qa/apm/build.gradle | 44 ----
qa/apm/config/elasticsearch/roles.yml | 34 ---
qa/apm/config/elasticsearch/service_tokens | 2 -
qa/apm/config/elasticsearch/users | 9 -
qa/apm/config/elasticsearch/users_roles | 13 --
qa/apm/config/kibana/kibana-8.yml | 78 -------
qa/apm/docker-compose.yml | 154 -------------
qa/apm/scripts/tls/apm-server/cert.crt | 27 ---
qa/apm/scripts/tls/apm-server/key.pem | 52 -----
.../elasticsearch/telemetry/apm/ApmIT.java | 210 ------------------
10 files changed, 623 deletions(-)
delete mode 100644 qa/apm/build.gradle
delete mode 100644 qa/apm/config/elasticsearch/roles.yml
delete mode 100644 qa/apm/config/elasticsearch/service_tokens
delete mode 100644 qa/apm/config/elasticsearch/users
delete mode 100644 qa/apm/config/elasticsearch/users_roles
delete mode 100644 qa/apm/config/kibana/kibana-8.yml
delete mode 100644 qa/apm/docker-compose.yml
delete mode 100644 qa/apm/scripts/tls/apm-server/cert.crt
delete mode 100644 qa/apm/scripts/tls/apm-server/key.pem
delete mode 100644 qa/apm/src/test/java/org/elasticsearch/telemetry/apm/ApmIT.java
diff --git a/qa/apm/build.gradle b/qa/apm/build.gradle
deleted file mode 100644
index ff22334462fd..000000000000
--- a/qa/apm/build.gradle
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-import org.elasticsearch.gradle.Architecture
-import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
-
-import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER
-
-apply plugin: 'elasticsearch.standalone-rest-test'
-apply plugin: 'elasticsearch.test.fixtures'
-apply plugin: 'elasticsearch.internal-distribution-download'
-
-dockerCompose {
- environment.put 'STACK_VERSION', BuildParams.snapshotBuild ? VersionProperties.elasticsearch : VersionProperties.elasticsearch + "-SNAPSHOT"
-}
-
-elasticsearch_distributions {
- docker {
- type = DOCKER
- architecture = Architecture.current()
- version = VersionProperties.getElasticsearch()
- failIfUnavailable = false // This ensures we skip this testing if Docker is unavailable
- }
-}
-
-tasks.named("preProcessFixture").configure {
- dependsOn elasticsearch_distributions.matching { it.architecture == Architecture.current() }
-}
-
-tasks.register("integTest", Test) {
- outputs.doNotCacheIf('Build cache is disabled for Docker tests') { true }
- maxParallelForks = '1'
- include '**/*IT.class'
-}
-
-tasks.named("check").configure {
- dependsOn "integTest"
-}
diff --git a/qa/apm/config/elasticsearch/roles.yml b/qa/apm/config/elasticsearch/roles.yml
deleted file mode 100644
index 91277fa8dd65..000000000000
--- a/qa/apm/config/elasticsearch/roles.yml
+++ /dev/null
@@ -1,34 +0,0 @@
----
-apm_server:
- cluster: ['manage_ilm', 'manage_security', 'manage_api_key']
- indices:
- - names: ['apm-*', 'logs-apm*', 'metrics-apm*', 'traces-apm*']
- privileges: ['write', 'create_index', 'manage', 'manage_ilm']
- applications:
- - application: 'apm'
- privileges: ['sourcemap:write', 'event:write', 'config_agent:read']
- resources: '*'
-beats:
- cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm', 'manage_security', 'manage_api_key']
- indices:
- - names: ['filebeat-*', 'shrink-filebeat-*']
- privileges: ['all']
-filebeat:
- cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm']
- indices:
- - names: ['filebeat-*', 'shrink-filebeat-*']
- privileges: ['all']
-heartbeat:
- cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm']
- indices:
- - names: ['heartbeat-*', 'shrink-heartbeat-*']
- privileges: ['all']
-metricbeat:
- cluster: ['manage_index_templates', 'monitor', 'manage_ingest_pipelines', 'manage_ilm']
- indices:
- - names: ['metricbeat-*', 'shrink-metricbeat-*']
- privileges: ['all']
-opbeans:
- indices:
- - names: ['opbeans-*']
- privileges: ['write', 'read']
diff --git a/qa/apm/config/elasticsearch/service_tokens b/qa/apm/config/elasticsearch/service_tokens
deleted file mode 100644
index 02c39a69bc9b..000000000000
--- a/qa/apm/config/elasticsearch/service_tokens
+++ /dev/null
@@ -1,2 +0,0 @@
-elastic/fleet-server/elastic-package-fleet-server-token:{PBKDF2_STRETCH}10000$PNiVyY96dHwRfoDszBvYPAz+mSLbC+NhtPh63dblDZU=$dAY1tXX1U5rXB+2Lt7m0L2LUNSb1q5nRaIqPNZTBxb8=
-elastic/kibana/elastic-package-kibana-token:{PBKDF2_STRETCH}10000$wIEFHIIIZ2ap0D0iQsyw0MfB7YuFA1bHnXAmlCoL4Gg=$YxvIJnasjLZyDQZpmFBiJHdR/CGXd5BnVm013Jty6p0=
diff --git a/qa/apm/config/elasticsearch/users b/qa/apm/config/elasticsearch/users
deleted file mode 100644
index 4cc30a99d92f..000000000000
--- a/qa/apm/config/elasticsearch/users
+++ /dev/null
@@ -1,9 +0,0 @@
-admin:$2a$10$xiY0ZzOKmDDN1p3if4t4muUBwh2.bFHADoMRAWQgSClm4ZJ4132Y.
-apm_server_user:$2a$10$iTy29qZaCSVn4FXlIjertuO8YfYVLCbvoUAJ3idaXfLRclg9GXdGG
-apm_user_ro:$2a$10$hQfy2o2u33SapUClsx8NCuRMpQyHP9b2l4t3QqrBA.5xXN2S.nT4u
-beats_user:$2a$10$LRpKi4/Q3Qo4oIbiu26rH.FNIL4aOH4aj2Kwi58FkMo1z9FgJONn2
-filebeat_user:$2a$10$sFxIEX8tKyOYgsbJLbUhTup76ssvSD3L4T0H6Raaxg4ewuNr.lUFC
-heartbeat_user:$2a$10$nKUGDr/V5ClfliglJhfy8.oEkjrDtklGQfhd9r9NoFqQeoNxr7uUK
-kibana_system_user:$2a$10$nN6sRtQl2KX9Gn8kV/.NpOLSk6Jwn8TehEDnZ7aaAgzyl/dy5PYzW
-metricbeat_user:$2a$10$5PyTd121U2ZXnFk9NyqxPuLxdptKbB8nK5egt6M5/4xrKUkk.GReG
-opbeans_user:$2a$10$iTy29qZaCSVn4FXlIjertuO8YfYVLCbvoUAJ3idaXfLRclg9GXdGG
diff --git a/qa/apm/config/elasticsearch/users_roles b/qa/apm/config/elasticsearch/users_roles
deleted file mode 100644
index 629fe7392c12..000000000000
--- a/qa/apm/config/elasticsearch/users_roles
+++ /dev/null
@@ -1,13 +0,0 @@
-apm_server:apm_server_user
-apm_system:apm_server_user
-apm_user:apm_server_user,apm_user_ro
-beats:beats_user
-beats_system:beats_user,filebeat_user,heartbeat_user,metricbeat_user
-filebeat:filebeat_user
-heartbeat:heartbeat_user
-ingest_admin:apm_server_user
-kibana_system:kibana_system_user
-kibana_user:apm_server_user,apm_user_ro,beats_user,filebeat_user,heartbeat_user,metricbeat_user,opbeans_user
-metricbeat:metricbeat_user
-opbeans:opbeans_user
-superuser:admin
diff --git a/qa/apm/config/kibana/kibana-8.yml b/qa/apm/config/kibana/kibana-8.yml
deleted file mode 100644
index 4b3add76282d..000000000000
--- a/qa/apm/config/kibana/kibana-8.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-xpack.fleet.packages:
- - name: system
- version: latest
- - name: elastic_agent
- version: latest
- - name: apm
- version: latest
- - name: fleet_server
- version: latest
-
-xpack.fleet.agentPolicies:
- - name: Fleet Server + APM policy
- id: fleet-server-apm-policy
- description: Fleet server policy with APM and System logs and metrics enabled
- namespace: default
- is_default_fleet_server: true
- is_managed: false
- monitoring_enabled:
- - logs
- - metrics
- package_policies:
- - name: system-1
- package:
- name: system
- - name: apm-1
- package:
- name: apm
- inputs:
- - type: apm
- keep_enabled: true
- vars:
- - name: host
- value: 0.0.0.0:8200
- frozen: true
- - name: url
- value: "${ELASTIC_APM_SERVER_URL}"
- frozen: true
- - name: enable_rum
- value: true
- frozen: true
- - name: read_timeout
- value: 1m
- frozen: true
- - name: shutdown_timeout
- value: 2m
- frozen: true
- - name: write_timeout
- value: 1m
- frozen: true
- - name: rum_allow_headers
- value:
- - x-custom-header
- frozen: true
- - name: secret_token
- value: "${ELASTIC_APM_SECRET_TOKEN}"
- frozen: true
- - name: tls_enabled
- value: ${ELASTIC_APM_TLS}
- frozen: true
- - name: tls_certificate
- value: /usr/share/apmserver/config/certs/tls.crt
- frozen: true
- - name: tls_key
- value: /usr/share/apmserver/config/certs/tls.key
- frozen: true
- - name: Fleet Server
- package:
- name: fleet_server
- inputs:
- - type: fleet-server
- keep_enabled: true
- vars:
- - name: host
- value: 0.0.0.0
- frozen: true
- - name: port
- value: 8220
- frozen: true
diff --git a/qa/apm/docker-compose.yml b/qa/apm/docker-compose.yml
deleted file mode 100644
index a3969479d091..000000000000
--- a/qa/apm/docker-compose.yml
+++ /dev/null
@@ -1,154 +0,0 @@
-version: "2.4"
-
-networks:
- default:
- name: apm-integration-testing
-
-services:
- apmserver:
- depends_on:
- kibana:
- condition: service_healthy
- environment:
- FLEET_ELASTICSEARCH_HOST: null
- FLEET_SERVER_ELASTICSEARCH_INSECURE: "1"
- FLEET_SERVER_ENABLE: "1"
- FLEET_SERVER_HOST: 0.0.0.0
- FLEET_SERVER_INSECURE_HTTP: "1"
- FLEET_SERVER_POLICY_ID: fleet-server-apm-policy
- FLEET_SERVER_PORT: "8220"
- FLEET_SERVER_SERVICE_TOKEN: AAEAAWVsYXN0aWMvZmxlZXQtc2VydmVyL2VsYXN0aWMtcGFja2FnZS1mbGVldC1zZXJ2ZXItdG9rZW46bmgtcFhoQzRRQ2FXbms2U0JySGlWQQ
- KIBANA_FLEET_HOST: null
- KIBANA_FLEET_SERVICE_TOKEN: AAEAAWVsYXN0aWMvZmxlZXQtc2VydmVyL2VsYXN0aWMtcGFja2FnZS1mbGVldC1zZXJ2ZXItdG9rZW46bmgtcFhoQzRRQ2FXbms2U0JySGlWQQ
- KIBANA_FLEET_SETUP: "1"
- healthcheck:
- test: /bin/true
- image: docker.elastic.co/beats/elastic-agent:${STACK_VERSION}
- labels:
- - co.elastic.apm.stack-version=${STACK_VERSION}
- logging:
- driver: json-file
- options:
- max-file: "5"
- max-size: 2m
- volumes:
- - /var/run/docker.sock:/var/run/docker.sock
- - ./scripts/tls/apmserver/cert.crt:/usr/share/apmserver/config/certs/tls.crt
- - ./scripts/tls/apmserver/key.pem:/usr/share/apmserver/config/certs/tls.key
-
- elasticsearch:
- environment:
- - action.destructive_requires_name=false
- - bootstrap.memory_lock=true
- - cluster.name=docker-cluster
- - cluster.routing.allocation.disk.threshold_enabled=false
- - discovery.type=single-node
- - ES_JAVA_OPTS=-Xms1g -Xmx1g
- - indices.id_field_data.enabled=true
- - ingest.geoip.downloader.enabled=false
- - path.repo=/usr/share/elasticsearch/data/backups
- - xpack.license.self_generated.type=trial
- - xpack.monitoring.collection.enabled=true
- - xpack.security.authc.anonymous.roles=remote_monitoring_collector
- - xpack.security.authc.api_key.enabled=true
- - xpack.security.authc.realms.file.file1.order=0
- - xpack.security.authc.realms.native.native1.order=1
- - xpack.security.authc.token.enabled=true
- - xpack.security.enabled=true
- # APM specific settings. We don't configure `secret_key` because Kibana is configured with a blank key
- - telemetry.tracing.enabled=true
- - telemetry.agent.server_url=http://apmserver:8200
- # Send traces to APM server aggressively
- - telemetry.agent.metrics_interval=1s
- # Record everything
- - telemetry.agent.transaction_sample_rate=1
- - telemetry.agent.log_level=debug
- healthcheck:
- interval: 20s
- retries: 10
- test: curl -s -k http://localhost:9200/_cluster/health | grep -vq '"status":"red"'
- image: elasticsearch:test
- labels:
- - co.elastic.apm.stack-version=${STACK_VERSION}
- - co.elastic.metrics/module=elasticsearch
- - co.elastic.metrics/metricsets=node,node_stats
- - co.elastic.metrics/hosts=http://$${data.host}:9200
- logging:
- driver: json-file
- options:
- max-file: "5"
- max-size: 2m
- ports:
- # - 127.0.0.1:9200:9200
- - "9200"
- ulimits:
- memlock:
- hard: -1
- soft: -1
- volumes:
- - ./config/elasticsearch/roles.yml:/usr/share/elasticsearch/config/roles.yml
- - ./config/elasticsearch/users:/usr/share/elasticsearch/config/users
- - ./config/elasticsearch/users_roles:/usr/share/elasticsearch/config/users_roles
- - ./config/elasticsearch/service_tokens:/usr/share/elasticsearch/config/service_tokens
-
- kibana:
- depends_on:
- elasticsearch:
- condition: service_healthy
- environment:
- ELASTICSEARCH_HOSTS: http://elasticsearch:9200
- ELASTICSEARCH_PASSWORD: changeme
- ELASTICSEARCH_USERNAME: kibana_system_user
- ELASTIC_APM_SECRET_TOKEN: ""
- ELASTIC_APM_SERVER_URL: http://apmserver:8200
- ELASTIC_APM_TLS: "false"
- SERVER_HOST: 0.0.0.0
- SERVER_NAME: kibana.example.org
- STATUS_ALLOWANONYMOUS: "true"
- TELEMETRY_ENABLED: "false"
- XPACK_APM_SERVICEMAPENABLED: "true"
- XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: fhjskloppd678ehkdfdlliverpoolfcr
- XPACK_FLEET_AGENTS_ELASTICSEARCH_HOSTS: '["http://elasticsearch:9200"]'
- # XPACK_FLEET_REGISTRYURL: https://epr-snapshot.elastic.co
- XPACK_MONITORING_ENABLED: "true"
- XPACK_REPORTING_ROLES_ENABLED: "false"
- XPACK_SECURITY_ENCRYPTIONKEY: fhjskloppd678ehkdfdlliverpoolfcr
- XPACK_SECURITY_LOGINASSISTANCEMESSAGE: Login details: `admin/changeme`. Further details [here](https://github.com/elastic/apm-integration-testing#logging-in).
- XPACK_SECURITY_SESSION_IDLETIMEOUT: 1M
- XPACK_SECURITY_SESSION_LIFESPAN: 3M
- XPACK_XPACK_MAIN_TELEMETRY_ENABLED: "false"
- healthcheck:
- interval: 10s
- retries: 30
- start_period: 10s
- test: curl -s -k http://kibana:5601/api/status | grep -q 'All services are available'
- image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
- labels:
- - co.elastic.apm.stack-version=${STACK_VERSION}
- logging:
- driver: json-file
- options:
- max-file: "5"
- max-size: 2m
- # ports:
- # - 127.0.0.1:5601:5601
- volumes:
- - ./config/kibana/kibana-8.yml:/usr/share/kibana/config/kibana.yml
-
- # Rather than mess around with threads in the test, just run `curl` in a
- # loop to generate traces with a known path
- tracegenerator:
- depends_on:
- apmserver:
- condition: service_healthy
- elasticsearch:
- condition: service_healthy
- kibana:
- condition: service_healthy
- # Official curl image
- image: curlimages/curl
- command: /bin/sh -c "while true; do curl -s -k -u admin:changeme http://elasticsearch:9200/_nodes/stats >/dev/null ; sleep 3; done"
-
-volumes:
- esdata:
- driver: local
diff --git a/qa/apm/scripts/tls/apm-server/cert.crt b/qa/apm/scripts/tls/apm-server/cert.crt
deleted file mode 100644
index b2f9aa7b5d23..000000000000
--- a/qa/apm/scripts/tls/apm-server/cert.crt
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIEpjCCAo4CCQDR9oXvJbopHjANBgkqhkiG9w0BAQsFADAVMRMwEQYDVQQDDAph
-cG0tc2VydmVyMB4XDTE5MTExOTE1MjE0NVoXDTI5MTExNjE1MjE0NVowFTETMBEG
-A1UEAwwKYXBtLXNlcnZlcjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
-ANduj3tyeBIHj0Bf5aKMRImhRbkAaQ2p6T0WsHKlicd1P4/D5l783+vVsbwprRqR
-qXAUsUWcUSYJXBX1qtC2MtKqi4xYUTAyQV5dgrMoCV+vtZY31SK4kolumd1vVMh+
-po+IwueLvLMFK1tQGIXlJblSDYVauIt5rp79IIhWOY/YpcQy9RaxykljTYTbPjLW
-m3T92bow1nLh5GL3ThJEAkLO+hkJv9716+YRWYtPcojiGzpLjFgF50MoP4Lilm9U
-r2tBnqpvb2PwE1kkly8DDBtcg+HM4tgGsbdWo2Pgp82ARV4DL+JlNJ+SVQZAmTbc
-3LMwxnUJtuKMeh2rwb9HOyuONXfF1PiEzyDhAlabyS6toAGy1mlMAop1ClO1wV5O
-Ayy47TeD6ziNyMKB7/XHdW4rb16K6j6EV27Bg2ZK6Vrfkwm3aRbpztfVRMX+HMUp
-ktH+V2OwJoP7l7lzw/q8yMdopG57zRJa1dx8NWP/UKi8Ej+87DYyWJODiNHD7PM7
-9vfd47lNcWxw+p7ntEpnn6EeW2r7SlmfhtdIxL2DiTiKAq9Ktyi9cFnGnDfSDJST
-T1G1vIDdG33Vt2Y5+wqzCGbYyMsAOaMdXZSeniXXFR4GX7iz+AGoKojBbmoo9VqP
-mvbudNU+ysha4IJvTfOczJZgstxCXG+MXbEXFSgysImFAgMBAAEwDQYJKoZIhvcN
-AQELBQADggIBAFh2YxRT6PaAXDq38rm25I91fCP9PzVPDuIkn9wl85e7avuh6FZi
-R0nQG6+lB1i8XSm9UMl9+ISjE+EQqry6KB6mDsakGOsDuEUdZiw3sGJIUWQkQArB
-ym5DqxKpeZBeVHBxnrEbQBV8s0j8uxd7X1E0ImfMKbKfNr/B5qPRXkREvydLWYvq
-8yMcUPu1MiZFUgAGr9Py39kW3lbRPWZii/2bN8AB9h6gAhq5TiennfgJZsRiuSta
-w/TmOcAuz4e/KPIzfvL/YCWbLyJ2vrIQeOc4N7jZfqMmLKgYCRyjI7+amfuyKPBW
-J4psfJ0ssHdTxAUK65vghJ2s6FLvU3HoxzetZsJp5kj6CKYaFYkB4NkkYnlY8MP/
-T68oOmdYwwwrcBmDtZwoppRb5zhev5k3aykgZ/B/vqVJE9oIPkp/7wqEP1WqSiUe
-AgyQBu8UN4ho2Rf6nZezZ4cjW/0WyhGOHQBFmwPI2MBGsQxF2PF4lKkJtaywIEm7
-4UsEQYK7Hf2J2OccWGvfo5HZ5tsSbuOGAf0bfHfaBQBsvzWet+TO6XX9VrWjnAKl
-bH+mInmnd9v2oABFl9Djv/Cw+lEAxxkCTW+DcwdEFJREPab5xhQDEpQQ/Ef0ihvg
-/ZtJQeoOYfrLN6K726QmoRWxvqxLyWK3gztcO1svHqr/cMt3ooLJEaqU
------END CERTIFICATE-----
diff --git a/qa/apm/scripts/tls/apm-server/key.pem b/qa/apm/scripts/tls/apm-server/key.pem
deleted file mode 100644
index 31208905f7d7..000000000000
--- a/qa/apm/scripts/tls/apm-server/key.pem
+++ /dev/null
@@ -1,52 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQDXbo97cngSB49A
-X+WijESJoUW5AGkNqek9FrBypYnHdT+Pw+Ze/N/r1bG8Ka0akalwFLFFnFEmCVwV
-9arQtjLSqouMWFEwMkFeXYKzKAlfr7WWN9UiuJKJbpndb1TIfqaPiMLni7yzBStb
-UBiF5SW5Ug2FWriLea6e/SCIVjmP2KXEMvUWscpJY02E2z4y1pt0/dm6MNZy4eRi
-904SRAJCzvoZCb/e9evmEVmLT3KI4hs6S4xYBedDKD+C4pZvVK9rQZ6qb29j8BNZ
-JJcvAwwbXIPhzOLYBrG3VqNj4KfNgEVeAy/iZTSfklUGQJk23NyzMMZ1CbbijHod
-q8G/RzsrjjV3xdT4hM8g4QJWm8kuraABstZpTAKKdQpTtcFeTgMsuO03g+s4jcjC
-ge/1x3VuK29eiuo+hFduwYNmSula35MJt2kW6c7X1UTF/hzFKZLR/ldjsCaD+5e5
-c8P6vMjHaKRue80SWtXcfDVj/1CovBI/vOw2MliTg4jRw+zzO/b33eO5TXFscPqe
-57RKZ5+hHltq+0pZn4bXSMS9g4k4igKvSrcovXBZxpw30gyUk09RtbyA3Rt91bdm
-OfsKswhm2MjLADmjHV2Unp4l1xUeBl+4s/gBqCqIwW5qKPVaj5r27nTVPsrIWuCC
-b03znMyWYLLcQlxvjF2xFxUoMrCJhQIDAQABAoICAQCfClIGsoUN2mLZBXLDw4W9
-jT+pyjHEEpHLtXphyO+kPlzER71Elq7AriveW24d1TcfNUeBulr2F6bR12FZX4i5
-mYoX/AND73Xusl4Q4Re6ej82PNWuIlCcAPi6Trxqn4VbJX2t7q1KBCDz8neIMZjd
-7UNqFYV0Akr1uK1RuUYZebk21N+29139O8A4upp6cZCml9kq6W8HtNgkb6pFNcvt
-gluELHxnn2mdmWVfwTEu+K1dJfTf7svB+m6Ys6qXWg9+wRzfehDj2JKQFsE9xaQk
-dvItulIlZRvB28YXr/xxa6bKNtQc8NYej6sRSJNTu017RCDeumM3cLmeOfR4v59f
-tkMWnFcA3ykmsaK2FiQyX+MoWvs5vdT7/yNIfz3a4MErcWg8z3FDbffKfbhgsb+2
-z4Ub6fIRKZykW2ajN7t0378bMmJ3rPT66QF40aNNeWasF3EHcwekDPpsHIBJoY4G
-9aG6uTUmRkC+NGeP9HroxkvDo2NbXn8XGOEJS64rwsME3CsUi1A5ZY0XLTxYptH6
-X2TfC5oTmnsYB/wWqo26bTJc0bwDOueQWYap0aVtv3f/0tzueKepCbxdeG4ikA0U
-2t3F+OUmoCZ5D0p+6zLvrTUPhPCFEynp+vGUvmbwozYi0NWzFyFqlvqRG1KLIVLG
-ZRyTMYuZ/cWkv1SJYbEcaQKCAQEA/9HaJg2YACv7rx6/FesE/81u16OYTaahHngW
-4M+5rT0+fNKYH/fYkwavQ/Gr6FSTls7F+8K9DVwoGLZRQ3t6epCXqGqX0uaY+iSH
-O8eezXVnHzUaVE4KlwJY9xZ+K1iIf5zUb5hpaQI0jKS/igcxFAsutWiyenrz8eQp
-MAycZmzkQMLbUsa1t6y0VaEaC4YMHyQ9ag2eMfqbG27plFQbYxllHXowGMFXPheY
-xACwo5V5tJUgRP+HlrI4rf0vadMgVIKxVSUiqIzGREIkYrTAshFjkpHR5/R8s/kH
-Xm8q2gdoJltBFJzA2B8MHXVi7mYDBlUmBoRKhzkl/TSray9j7wKCAQEA15VsNQZu
-cZluboz/R4EDbEm1po2UBcNNiu/fgJ8BDUkLzJESIITY41fgvBbTun1fiuGeE+El
-0o1w4hQhIiV1KAB44w69fJR0VELfMZiIcd8kd0sDgPPVrd1MzzKPZ9yg4mbEkCCO
-V/EoTi8Ut27sMcl8059qm1qq7I5pzHwSziNa087m+5VdfmvJZJVipudngZ3QmRgU
-KKcBhgFFSkncYezoq2XQfRcqkk0sORxDvsMmRInyHZh0l9zv46ihgTvErlCHtizV
-V4HNO4OPz7FxUZ04iWSGZs4snu1cW2j+lbKuOkADveBYVmCcdZ3R0SH+A5skL0zG
-tm6z0TNP/kFlywKCAQEA+lTdFu2od0qTADujG4yemL7rn2J8EEhlU86J/LXo6UiM
-FFNz/5xltwIMkf00jqXswt9WR9W5cBBlQEFwZgu3v6YscebU6NE0k1sZZnshv8YK
-AjTRrfusSzdF3YyKLFp3QAE0tHs9cz9wMsyojiYZdZa3v1dTh503h9YQI+/DQEuA
-VIsZWfgPLEx5L231cZ9bz0GEQ3pN+nRUQdUYB0kCf8gC9YRy+lZ/y8gFeo9+SqVj
-sj1XlY1DnkiKRGAEfJbYBTra0woCz1LqVTMwLdLY2adAe9XrxQKu4OJovpUkJrSm
-yxnzJnt6DkLbdRxAki8K+LBsBGaCE67tqMhYkguOywKCAQAslEl77YiJFSEw2xcu
-wg7jJZrahgxF5Mz0HgYporek96Xo91a4QsBWwqVGP7IoriRDo8P8eGJJ19Wv6lmv
-pe9EBlT5HuMwD8K+adWde907Ltlrkad30vQsr8ZiUiI1Z/oc1wNuikzlAolDIZk3
-FUjiQrf9SsnQtj8CC7D1B/MbjVQK2I4LGCftLHzIv9tWiCNvOiMYhVIl1eMKwtiB
-NCTOWx8B0lv6gf/boPm0FZQsrk4LfjsCw7PYc2dnvEcpYiKZqS1nDn5PShgWZm4m
-lJrKNairQI5KU/gGJS8j9+ItMnW0tegQK4QY2IGCENCCXnUYacxhu46byuiEKggw
-m3VhAoIBAQCQa90StsZHqZ+J83do3kpvD+O5nURPnckznC2WJgraW49k5vltnJTT
-zkFTqHMLfmYwAz1o15sPCqlkMD+fEUzg6Hpzxm7dOUppkf5KFbD7AnsYU9U8LamJ
-HaET7Dq5TpjG7uoaHZZjs7cCHcWu2E8nIezyAtZ+rbTg/qW7bYMAlJTkerznGuDU
-v0hNzCr/81o5rbX0UhetcmKVOprUSWzfrw5ElLhAtzM7zivbZSnsOny8pC33FtQ5
-iQbVcNGUjfFCM95ZipxxN9z0FwxpJ1paCPGYA86u2olWl/VnVPqEj7WYzO8H5W2q
-aXpWH6HVf6B10pQrWWwUAAHyqYS5bZkQ
------END PRIVATE KEY-----
diff --git a/qa/apm/src/test/java/org/elasticsearch/telemetry/apm/ApmIT.java b/qa/apm/src/test/java/org/elasticsearch/telemetry/apm/ApmIT.java
deleted file mode 100644
index 021d9f8d01bf..000000000000
--- a/qa/apm/src/test/java/org/elasticsearch/telemetry/apm/ApmIT.java
+++ /dev/null
@@ -1,210 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.telemetry.apm;
-
-import org.elasticsearch.client.Request;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.settings.SecureString;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.common.xcontent.support.XContentMapValues;
-import org.elasticsearch.core.CheckedRunnable;
-import org.elasticsearch.test.rest.ESRestTestCase;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.stream.Collectors;
-
-import static org.hamcrest.Matchers.empty;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasKey;
-import static org.hamcrest.Matchers.not;
-
-/**
- * Tests around Elasticsearch's tracing support using APM.
- */
-public class ApmIT extends ESRestTestCase {
-
- private static final String DATA_STREAM = "traces-apm-default";
-
- /**
- * Check that if we send HTTP traffic to Elasticsearch, then traces are captured in APM server. The traces are generated in
- * a separate Docker container, which continually fetches `/_nodes/stats`. We check for the following:
- *
- * - A transaction for the REST API call
- * - A span for the task started by the REST call
- * - A child span started by the above span
- *
- * This proves that the hierarchy of spans is being correctly captured.
- */
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/90308")
- public void testCapturesTracesForHttpTraffic() throws Exception {
- checkTracesDataStream();
-
- assertTracesExist();
- }
-
- private void checkTracesDataStream() throws Exception {
- assertBusy(() -> {
- final Response response = performRequestTolerantly(new Request("GET", "/_data_stream/" + DATA_STREAM));
- assertOK(response);
- }, 1, TimeUnit.MINUTES);
- }
-
- private void assertTracesExist() throws Exception {
- // First look for a transaction for the REST calls that we make via the `tracegenerator` Docker container
-
- final AtomicReference<String> transactionId = new AtomicReference<>();
- assertBusy(() -> {
- final Request tracesSearchRequest = new Request("GET", "/" + DATA_STREAM + "/_search");
- tracesSearchRequest.setJsonEntity("""
- {
- "query": {
- "match": { "transaction.name": "GET /_nodes/stats" }
- }
- }""");
- final Response tracesSearchResponse = performRequestTolerantly(tracesSearchRequest);
- assertOK(tracesSearchResponse);
-
- final List
*/
@Override public T visitStringArrayLiteral(EsqlBaseParser.StringArrayLiteralContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitInputParam(EsqlBaseParser.InputParamContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
index 0c53191ab9ab..081deb03e835 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java
@@ -526,17 +526,17 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
*/
void exitBooleanLiteral(EsqlBaseParser.BooleanLiteralContext ctx);
/**
- * Enter a parse tree produced by the {@code inputParam}
+ * Enter a parse tree produced by the {@code inputParams}
* labeled alternative in {@link EsqlBaseParser#constant}.
* @param ctx the parse tree
*/
- void enterInputParam(EsqlBaseParser.InputParamContext ctx);
+ void enterInputParams(EsqlBaseParser.InputParamsContext ctx);
/**
- * Exit a parse tree produced by the {@code inputParam}
+ * Exit a parse tree produced by the {@code inputParams}
* labeled alternative in {@link EsqlBaseParser#constant}.
* @param ctx the parse tree
*/
- void exitInputParam(EsqlBaseParser.InputParamContext ctx);
+ void exitInputParams(EsqlBaseParser.InputParamsContext ctx);
/**
* Enter a parse tree produced by the {@code stringLiteral}
* labeled alternative in {@link EsqlBaseParser#constant}.
@@ -585,6 +585,30 @@ public interface EsqlBaseParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitStringArrayLiteral(EsqlBaseParser.StringArrayLiteralContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code inputParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ */
+ void enterInputParam(EsqlBaseParser.InputParamContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code inputParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ */
+ void exitInputParam(EsqlBaseParser.InputParamContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code inputNamedOrPositionalParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ */
+ void enterInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code inputNamedOrPositionalParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ */
+ void exitInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx);
/**
* Enter a parse tree produced by {@link EsqlBaseParser#limitCommand}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
index 54f506c9d3b3..d1ffbd5fa0b3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java
@@ -317,12 +317,12 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitBooleanLiteral(EsqlBaseParser.BooleanLiteralContext ctx);
/**
- * Visit a parse tree produced by the {@code inputParam}
+ * Visit a parse tree produced by the {@code inputParams}
* labeled alternative in {@link EsqlBaseParser#constant}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitInputParam(EsqlBaseParser.InputParamContext ctx);
+ T visitInputParams(EsqlBaseParser.InputParamsContext ctx);
/**
* Visit a parse tree produced by the {@code stringLiteral}
* labeled alternative in {@link EsqlBaseParser#constant}.
@@ -351,6 +351,20 @@ public interface EsqlBaseParserVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitStringArrayLiteral(EsqlBaseParser.StringArrayLiteralContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code inputParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitInputParam(EsqlBaseParser.InputParamContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code inputNamedOrPositionalParam}
+ * labeled alternative in {@link EsqlBaseParser#params}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx);
/**
* Visit a parse tree produced by {@link EsqlBaseParser#limitCommand}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java
index 147d946dcef0..56822386b295 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlParser.java
@@ -22,23 +22,23 @@ import org.elasticsearch.logging.Logger;
import org.elasticsearch.xpack.esql.core.parser.CaseChangingCharStream;
import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
-import java.util.HashMap;
-import java.util.List;
+import java.util.BitSet;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import static org.elasticsearch.xpack.esql.core.parser.ParserUtils.source;
+import static org.elasticsearch.xpack.esql.core.util.StringUtils.isInteger;
public class EsqlParser {
private static final Logger log = LogManager.getLogger(EsqlParser.class);
public LogicalPlan createStatement(String query) {
- return createStatement(query, List.of());
+ return createStatement(query, QueryParams.EMPTY);
}
- public LogicalPlan createStatement(String query, List<TypedParamValue> params) {
+ public LogicalPlan createStatement(String query, QueryParams params) {
if (log.isDebugEnabled()) {
log.debug("Parsing as statement: {}", query);
}
@@ -47,7 +47,7 @@ public class EsqlParser {
private <T> T invokeParser(
String query,
- List<TypedParamValue> params,
+ QueryParams params,
Function<EsqlBaseParser, ParserRuleContext> parseFunction,
BiFunction<AstBuilder, ParserRuleContext, T> result
) {
@@ -57,8 +57,8 @@ public class EsqlParser {
lexer.removeErrorListeners();
lexer.addErrorListener(ERROR_LISTENER);
- Map<Token, TypedParamValue> paramTokens = new HashMap<>();
- TokenSource tokenSource = new ParametrizedTokenSource(lexer, paramTokens, params);
+ Map<Token, QueryParam> positionalParamTokens = params.positionalParamTokens();
+ TokenSource tokenSource = new ParametrizedTokenSource(lexer, positionalParamTokens, params);
CommonTokenStream tokenStream = new CommonTokenStream(tokenSource);
EsqlBaseParser parser = new EsqlBaseParser(tokenStream);
@@ -76,7 +76,7 @@ public class EsqlParser {
log.trace("Parse tree: {}", tree.toStringTree());
}
- return result.apply(new AstBuilder(paramTokens), tree);
+ return result.apply(new AstBuilder(params), tree);
} catch (StackOverflowError e) {
throw new ParsingException("ESQL statement is too large, causing stack overflow when generating the parsing tree: [{}]", query);
}
@@ -119,11 +119,14 @@ public class EsqlParser {
private static class ParametrizedTokenSource implements TokenSource {
private TokenSource delegate;
- private Map<Token, TypedParamValue> paramTokens;
+ private Map<Token, QueryParam> paramTokens;
private int param;
- private List<TypedParamValue> params;
+ private QueryParams params;
+ private BitSet paramTypes = new BitSet(3);
+ private static String message = "Inconsistent parameter declaration, "
+ + "use one of positional, named or anonymous params but not a combination of ";
- ParametrizedTokenSource(TokenSource delegate, Map<Token, TypedParamValue> paramTokens, List<TypedParamValue> params) {
+ ParametrizedTokenSource(TokenSource delegate, Map<Token, QueryParam> paramTokens, QueryParams params) {
this.delegate = delegate;
this.paramTokens = paramTokens;
this.params = params;
@@ -134,12 +137,21 @@ public class EsqlParser {
public Token nextToken() {
Token token = delegate.nextToken();
if (token.getType() == EsqlBaseLexer.PARAM) {
- if (param >= params.size()) {
- throw new ParsingException("Not enough actual parameters {}", params.size());
+ checkAnonymousParam(token);
+ if (param >= params.positionalParams().size()) {
+ throw new ParsingException(source(token), "Not enough actual parameters {}", params.positionalParams().size());
}
- paramTokens.put(token, params.get(param));
+ paramTokens.put(token, params.positionalParams().get(param));
param++;
}
+
+ if (token.getType() == EsqlBaseLexer.NAMED_OR_POSITIONAL_PARAM) {
+ if (isInteger(token.getText().substring(1))) {
+ checkPositionalParam(token);
+ } else {
+ checkNamedParam(token);
+ }
+ }
return token;
}
@@ -172,5 +184,26 @@ public class EsqlParser {
public TokenFactory<?> getTokenFactory() {
return delegate.getTokenFactory();
}
+
+ private void checkAnonymousParam(Token token) {
+ paramTypes.set(0);
+ if (paramTypes.cardinality() > 1) {
+ throw new ParsingException(source(token), message + "anonymous and " + (paramTypes.get(1) ? "named" : "positional"));
+ }
+ }
+
+ private void checkNamedParam(Token token) {
+ paramTypes.set(1);
+ if (paramTypes.cardinality() > 1) {
+ throw new ParsingException(source(token), message + "named and " + (paramTypes.get(0) ? "anonymous" : "positional"));
+ }
+ }
+
+ private void checkPositionalParam(Token token) {
+ paramTypes.set(2);
+ if (paramTypes.cardinality() > 1) {
+ throw new ParsingException(source(token), message + "positional and " + (paramTypes.get(0) ? "anonymous" : "named"));
+ }
+ }
}
}
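[Note, not part of the patch] The updated ParametrizedTokenSource above distinguishes three parameter marker styles and rejects mixing them in a single query with the "Inconsistent parameter declaration" ParsingException. A minimal sketch of the three styles, using hypothetical ES|QL queries and field names for illustration only:

    // Anonymous parameters: PARAM tokens, consumed in order of appearance.
    String anonymous  = "from idx | where x == ? and y == ?";
    // Named parameters: NAMED_OR_POSITIONAL_PARAM tokens whose text after '?' is not an integer.
    String named      = "from idx | where x == ?xval and y == ?yval";
    // Positional parameters: NAMED_OR_POSITIONAL_PARAM tokens whose text after '?' is an integer (1-based).
    String positional = "from idx | where x == ?1 and y == ?2";
    // Mixing styles, e.g. "from idx | where x == ? and y == ?yval", throws a ParsingException.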
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
index a32ae9167aeb..59801e59555b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java
@@ -18,7 +18,6 @@ import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
-import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException;
import org.elasticsearch.xpack.esql.core.expression.Alias;
import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -68,7 +67,6 @@ import java.time.temporal.TemporalAmount;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
-import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Consumer;
@@ -82,6 +80,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION;
import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned;
import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber;
import static org.elasticsearch.xpack.esql.core.util.StringUtils.WILDCARD;
+import static org.elasticsearch.xpack.esql.core.util.StringUtils.isInteger;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.parseTemporalAmout;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIntegral;
@@ -95,9 +94,9 @@ public abstract class ExpressionBuilder extends IdentifierBuilder {
*/
public static final int MAX_EXPRESSION_DEPTH = 500;
- private final Map<Token, TypedParamValue> params;
+ protected final QueryParams params;
- ExpressionBuilder(Map<Token, TypedParamValue> params) {
+ ExpressionBuilder(QueryParams params) {
this.params = params;
}
@@ -691,62 +690,64 @@ public abstract class ExpressionBuilder extends IdentifierBuilder {
@Override
public Object visitInputParam(EsqlBaseParser.InputParamContext ctx) {
- TypedParamValue param = param(ctx.PARAM());
- DataType dataType = EsqlDataTypes.fromTypeName(param.type);
- Source source = source(ctx);
- if (dataType == null) {
- throw new ParsingException(source, "Invalid parameter data type [{}]", param.type);
- }
- if (param.value == null) {
- // no conversion is required for null values
- return new Literal(source, null, dataType);
- }
- final DataType sourceType;
- try {
- sourceType = DataType.fromJava(param.value);
- } catch (QlIllegalArgumentException ex) {
- throw new ParsingException(
- ex,
- source,
- "Unexpected actual parameter type [{}] for type [{}]",
- param.value.getClass().getName(),
- param.type
- );
- }
- if (sourceType == dataType) {
- // no conversion is required if the value is already have correct type
- return new Literal(source, param.value, dataType);
- }
- // otherwise we need to make sure that xcontent-serialized value is converted to the correct type
- try {
-
- if (EsqlDataTypeConverter.canConvert(sourceType, dataType) == false) {
- throw new ParsingException(
- source,
- "Cannot cast value [{}] of type [{}] to parameter type [{}]",
- param.value,
- sourceType,
- dataType
- );
- }
- return new Literal(source, EsqlDataTypeConverter.converterFor(sourceType, dataType).convert(param.value), dataType);
- } catch (QlIllegalArgumentException ex) {
- throw new ParsingException(ex, source, "Unexpected actual parameter type [{}] for type [{}]", sourceType, param.type);
- }
+ QueryParam param = paramByToken(ctx.PARAM());
+ return visitParam(ctx, param);
}
- private TypedParamValue param(TerminalNode node) {
+ @Override
+ public Object visitInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx) {
+ QueryParam param = paramByNameOrPosition(ctx.NAMED_OR_POSITIONAL_PARAM());
+ if (param == null) {
+ return Literal.NULL;
+ }
+ return visitParam(ctx, param);
+ }
+
+ private Object visitParam(EsqlBaseParser.ParamsContext ctx, QueryParam param) {
+ Source source = source(ctx);
+ DataType type = param.type();
+ return new Literal(source, param.value(), type);
+ }
+
+ QueryParam paramByToken(TerminalNode node) {
if (node == null) {
return null;
}
-
Token token = node.getSymbol();
-
- if (params.containsKey(token) == false) {
+ if (params.contains(token) == false) {
throw new ParsingException(source(node), "Unexpected parameter");
}
-
return params.get(token);
}
+ QueryParam paramByNameOrPosition(TerminalNode node) {
+ if (node == null) {
+ return null;
+ }
+ Token token = node.getSymbol();
+ String nameOrPosition = token.getText().substring(1);
+ if (isInteger(nameOrPosition)) {
+ int index = Integer.parseInt(nameOrPosition);
+ if (params.get(index) == null) {
+ String message = "";
+ int np = params.positionalParams().size();
+ if (np > 0) {
+ message = ", did you mean " + (np == 1 ? "position 1?" : "any position between 1 and " + np + "?");
+ }
+ params.addParsingError(new ParsingException(source(node), "No parameter is defined for position " + index + message));
+ }
+ return params.get(index);
+ } else {
+ if (params.contains(nameOrPosition) == false) {
+ String message = "";
+ List<String> potentialMatches = StringUtils.findSimilar(nameOrPosition, params.namedParams().keySet());
+ if (potentialMatches.size() > 0) {
+ message = ", did you mean "
+ + (potentialMatches.size() == 1 ? "[" + potentialMatches.get(0) + "]?" : "any of " + potentialMatches + "?");
+ }
+ params.addParsingError(new ParsingException(source(node), "Unknown query parameter [" + nameOrPosition + "]" + message));
+ }
+ return params.get(nameOrPosition);
+ }
+ }
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
index ec238c1fcf31..f829a7cb6ed0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java
@@ -85,12 +85,25 @@ public class LogicalPlanBuilder extends ExpressionBuilder {
*/
public static final int MAX_QUERY_DEPTH = 500;
- public LogicalPlanBuilder(Map<Token, TypedParamValue> params) {
+ public LogicalPlanBuilder(QueryParams params) {
super(params);
}
protected LogicalPlan plan(ParseTree ctx) {
- return ParserUtils.typedParsing(this, ctx, LogicalPlan.class);
+ LogicalPlan p = ParserUtils.typedParsing(this, ctx, LogicalPlan.class);
+ var errors = this.params.parsingErrors();
+ if (errors.isEmpty()) {
+ return p;
+ } else {
+ StringBuilder message = new StringBuilder();
+ for (int i = 0; i < errors.size(); i++) {
+ if (i > 0) {
+ message.append("; ");
+ }
+ message.append(errors.get(i).getMessage());
+ }
+ throw new ParsingException(message.toString());
+ }
}
protected List<LogicalPlan> plans(List<? extends ParserRuleContext> ctxs) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParam.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParam.java
new file mode 100644
index 000000000000..022c18fdc586
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParam.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+package org.elasticsearch.xpack.esql.parser;
+
+import org.elasticsearch.xpack.esql.core.type.DataType;
+
+/**
+ * Represent a strongly typed parameter value
+ */
+public record QueryParam(String name, Object value, DataType type) {
+
+ public String nameValue() {
+ return "{" + (this.name == null ? "" : this.name + ":") + this.value + "}";
+ }
+
+ @Override
+ public String toString() {
+ return value + " [" + name + "][" + type + "]";
+ }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParams.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParams.java
new file mode 100644
index 000000000000..ebba6d3d0b48
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/QueryParams.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.parser;
+
+import org.antlr.v4.runtime.Token;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class QueryParams {
+
+ public static final QueryParams EMPTY = new QueryParams();
+
+ // This matches the named or unnamed parameters specified in an EsqlQueryRequest.params.
+ private List<QueryParam> params = new ArrayList<>();
+
+ // This matches the named parameters specified in an EsqlQueryRequest.params.
+ private Map<String, QueryParam> nameToParam = new HashMap<>();
+
+ // This is populated by EsqlParser, each parameter marker has an entry.
+ private Map<Token, QueryParam> tokenToParam = new HashMap<>();
+
+ private List<ParsingException> parsingErrors = new ArrayList<>();
+
+ public QueryParams() {}
+
+ public QueryParams(List<QueryParam> params) {
+ for (QueryParam p : params) {
+ this.params.add(p);
+ String name = p.name();
+ if (name != null) {
+ nameToParam.put(name, p);
+ }
+ }
+ }
+
+ public List<QueryParam> positionalParams() {
+ return this.params;
+ }
+
+ public QueryParam get(int index) {
+ return (index <= 0 || index > params.size()) ? null : params.get(index - 1);
+ }
+
+ public Map<String, QueryParam> namedParams() {
+ return this.nameToParam;
+ }
+
+ public boolean contains(String paramName) {
+ return this.nameToParam.containsKey(paramName);
+ }
+
+ public QueryParam get(String paramName) {
+ return nameToParam.get(paramName);
+ }
+
+ public Map<Token, QueryParam> positionalParamTokens() {
+ return this.tokenToParam;
+ }
+
+ public boolean contains(Token token) {
+ return this.tokenToParam.containsKey(token);
+ }
+
+ public QueryParam get(Token tokenLocation) {
+ return this.tokenToParam.get(tokenLocation);
+ }
+
+ public List<ParsingException> parsingErrors() {
+ return this.parsingErrors;
+ }
+
+ public void addParsingError(ParsingException e) {
+ this.parsingErrors.add(e);
+ }
+}
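[Note, not part of the patch] A minimal usage sketch of the QueryParam/QueryParams API introduced above; the variable names are hypothetical, and the calls shown (the list constructor, contains(String), get(String), get(int)) are those defined in this diff:

    // Assumes the imports used by QueryParams.java above, plus java.util.List and DataType.
    QueryParam ageParam   = new QueryParam("age", 21, DataType.INTEGER);  // named, referenced as ?age
    QueryParam limitParam = new QueryParam(null, 10, DataType.INTEGER);   // anonymous, referenced as ? or ?2
    QueryParams params    = new QueryParams(List.of(ageParam, limitParam));

    params.contains("age");  // true
    params.get("age");       // ageParam (named lookup)
    params.get(2);           // limitParam (1-based positional lookup)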
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/TypedParamValue.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/TypedParamValue.java
deleted file mode 100644
index 74cc53e51b36..000000000000
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/TypedParamValue.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-package org.elasticsearch.xpack.esql.parser;
-
-import java.util.Objects;
-
-/**
- * Represent a strongly typed parameter value
- */
-public class TypedParamValue {
-
- public final Object value;
- public final String type;
- private boolean hasExplicitType; // the type is explicitly set in the request or inferred by the parser
- private ContentLocation tokenLocation; // location of the token failing the parsing rules
-
- public TypedParamValue(String type, Object value) {
- this(type, value, true);
- }
-
- public TypedParamValue(String type, Object value, boolean hasExplicitType) {
- this.value = value;
- this.type = type;
- this.hasExplicitType = hasExplicitType;
- }
-
- public boolean hasExplicitType() {
- return hasExplicitType;
- }
-
- public void hasExplicitType(boolean hasExplicitType) {
- this.hasExplicitType = hasExplicitType;
- }
-
- public ContentLocation tokenLocation() {
- return tokenLocation;
- }
-
- public void tokenLocation(ContentLocation tokenLocation) {
- this.tokenLocation = tokenLocation;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
- TypedParamValue that = (TypedParamValue) o;
- return Objects.equals(value, that.value)
- && Objects.equals(type, that.type)
- && Objects.equals(hasExplicitType, that.hasExplicitType);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(value, type, hasExplicitType);
- }
-
- @Override
- public String toString() {
- return String.valueOf(value) + " [" + type + "][" + hasExplicitType + "][" + tokenLocation + "]";
- }
-}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 10116bb24e6b..1f5374b73466 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -46,7 +46,7 @@ import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext;
import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer;
import org.elasticsearch.xpack.esql.parser.EsqlParser;
-import org.elasticsearch.xpack.esql.parser.TypedParamValue;
+import org.elasticsearch.xpack.esql.parser.QueryParams;
import org.elasticsearch.xpack.esql.plan.logical.Aggregate;
import org.elasticsearch.xpack.esql.plan.logical.Enrich;
import org.elasticsearch.xpack.esql.plan.logical.Keep;
@@ -139,7 +139,7 @@ public class EsqlSession {
);
}
- private LogicalPlan parse(String query, List<TypedParamValue> params) {
+ private LogicalPlan parse(String query, QueryParams params) {
var parsed = new EsqlParser().createStatement(query, params);
LOGGER.debug("Parsed logical plan:\n{}", parsed);
return parsed;
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java
index ab0bc114a276..a8ad53b8bc66 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java
@@ -29,12 +29,13 @@ import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.NamedXContentRegistry;
+import org.elasticsearch.xcontent.XContentParseException;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.elasticsearch.xpack.esql.Column;
import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.parser.TypedParamValue;
+import org.elasticsearch.xpack.esql.parser.QueryParam;
import java.io.IOException;
import java.util.ArrayList;
@@ -57,7 +58,7 @@ public class EsqlQueryRequestTests extends ESTestCase {
Locale locale = randomLocale(random());
QueryBuilder filter = randomQueryBuilder();
- List<TypedParamValue> params = randomParameters();
+ List<QueryParam> params = randomParameters();
boolean hasParams = params.isEmpty() == false;
StringBuilder paramsString = paramsString(params, hasParams);
String json = String.format(Locale.ROOT, """
@@ -75,20 +76,119 @@ public class EsqlQueryRequestTests extends ESTestCase {
assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag());
assertEquals(locale, request.locale());
assertEquals(filter, request.filter());
-
- assertEquals(params.size(), request.params().size());
+ assertEquals(params.size(), request.params().positionalParams().size());
for (int i = 0; i < params.size(); i++) {
- assertEquals(params.get(i), request.params().get(i));
+ assertEquals(params.get(i), request.params().positionalParams().get(i));
}
}
+ public void testNamedParams() throws IOException {
+ String query = randomAlphaOfLengthBetween(1, 100);
+ boolean columnar = randomBoolean();
+ Locale locale = randomLocale(random());
+ QueryBuilder filter = randomQueryBuilder();
+
+ String paramsString = """
+ ,"params":[ {"n1" : "8.15.0" }, { "n2" : 0.05 }, {"n3" : -799810013 },
+ {"n4" : "127.0.0.1"}, {"n5" : "esql"}, {"n_6" : null}, {"n7_" : false}] }""";
+ List<QueryParam> params = new ArrayList<>(4);
+ params.add(new QueryParam("n1", "8.15.0", DataType.KEYWORD));
+ params.add(new QueryParam("n2", 0.05, DataType.DOUBLE));
+ params.add(new QueryParam("n3", -799810013, DataType.INTEGER));
+ params.add(new QueryParam("n4", "127.0.0.1", DataType.KEYWORD));
+ params.add(new QueryParam("n5", "esql", DataType.KEYWORD));
+ params.add(new QueryParam("n_6", null, DataType.NULL));
+ params.add(new QueryParam("n7_", false, DataType.BOOLEAN));
+ String json = String.format(Locale.ROOT, """
+ {
+ "query": "%s",
+ "columnar": %s,
+ "locale": "%s",
+ "filter": %s
+ %s""", query, columnar, locale.toLanguageTag(), filter, paramsString);
+
+ EsqlQueryRequest request = parseEsqlQueryRequestSync(json);
+
+ assertEquals(query, request.query());
+ assertEquals(columnar, request.columnar());
+ assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag());
+ assertEquals(locale, request.locale());
+ assertEquals(filter, request.filter());
+ assertEquals(params.size(), request.params().positionalParams().size());
+
+ for (int i = 0; i < request.params().positionalParams().size(); i++) {
+ assertEquals(params.get(i), request.params().positionalParams().get(i));
+ }
+ }
+
+ public void testInvalidParams() throws IOException {
+ String query = randomAlphaOfLengthBetween(1, 100);
+ boolean columnar = randomBoolean();
+ Locale locale = randomLocale(random());
+ QueryBuilder filter = randomQueryBuilder();
+
+ String paramsString1 = """
+ "params":[ {"1" : "v1" }, {"1x" : "v1" }, {"_a" : "v1" }, {"@-#" : "v1" }, 1, 2]""";
+ String json1 = String.format(Locale.ROOT, """
+ {
+ %s
+ "query": "%s",
+ "columnar": %s,
+ "locale": "%s",
+ "filter": %s
+ }""", paramsString1, query, columnar, locale.toLanguageTag(), filter);
+
+ Exception e1 = expectThrows(XContentParseException.class, () -> parseEsqlQueryRequestSync(json1));
+ assertThat(
+ e1.getCause().getMessage(),
+ containsString(
+ "Failed to parse params: [2:16] [1] is not a valid parameter name, "
+ + "a valid parameter name starts with a letter and contains letters, digits and underscores only"
+ )
+ );
+ assertThat(e1.getCause().getMessage(), containsString("[2:31] [1x] is not a valid parameter name"));
+ assertThat(e1.getCause().getMessage(), containsString("[2:47] [_a] is not a valid parameter name"));
+ assertThat(e1.getCause().getMessage(), containsString("[2:63] [@-#] is not a valid parameter name"));
+ assertThat(
+ e1.getCause().getMessage(),
+ containsString(
+ "Params cannot contain both named and unnamed parameters; got [{1:v1}, {1x:v1}, {_a:v1}, {@-#:v1}] and [{1}, {2}]"
+ )
+ );
+
+ String paramsString2 = """
+ "params":[ 1, 2, {"1" : "v1" }, {"1x" : "v1" }]""";
+ String json2 = String.format(Locale.ROOT, """
+ {
+ %s
+ "query": "%s",
+ "columnar": %s,
+ "locale": "%s",
+ "filter": %s
+ }""", paramsString2, query, columnar, locale.toLanguageTag(), filter);
+
+ Exception e2 = expectThrows(XContentParseException.class, () -> parseEsqlQueryRequestSync(json2));
+ assertThat(
+ e2.getCause().getMessage(),
+ containsString(
+ "Failed to parse params: [2:22] [1] is not a valid parameter name, "
+ + "a valid parameter name starts with a letter and contains letters, digits and underscores only"
+ )
+ );
+ assertThat(e2.getCause().getMessage(), containsString("[2:37] [1x] is not a valid parameter name"));
+ assertThat(
+ e2.getCause().getMessage(),
+ containsString("Params cannot contain both named and unnamed parameters; got [{1:v1}, {1x:v1}] and [{1}, {2}]")
+ );
+ }
+
public void testParseFieldsForAsync() throws IOException {
String query = randomAlphaOfLengthBetween(1, 100);
boolean columnar = randomBoolean();
Locale locale = randomLocale(random());
QueryBuilder filter = randomQueryBuilder();
- List<TypedParamValue> params = randomParameters();
+ List<QueryParam> params = randomParameters();
boolean hasParams = params.isEmpty() == false;
StringBuilder paramsString = paramsString(params, hasParams);
boolean keepOnCompletion = randomBoolean();
@@ -126,10 +226,9 @@ public class EsqlQueryRequestTests extends ESTestCase {
assertEquals(keepOnCompletion, request.keepOnCompletion());
assertEquals(waitForCompletion, request.waitForCompletionTimeout());
assertEquals(keepAlive, request.keepAlive());
-
- assertEquals(params.size(), request.params().size());
+ assertEquals(params.size(), request.params().positionalParams().size());
for (int i = 0; i < params.size(); i++) {
- assertEquals(params.get(i), request.params().get(i));
+ assertEquals(params.get(i), request.params().positionalParams().get(i));
}
}
@@ -378,22 +477,21 @@ public class EsqlQueryRequestTests extends ESTestCase {
assertThat(json, equalTo(expected));
}
- private List<TypedParamValue> randomParameters() {
+ private List<QueryParam> randomParameters() {
if (randomBoolean()) {
return Collections.emptyList();
} else {
int len = randomIntBetween(1, 10);
- List<TypedParamValue> arr = new ArrayList<>(len);
+ List<QueryParam> arr = new ArrayList<>(len);
for (int i = 0; i < len; i++) {
- boolean hasExplicitType = randomBoolean();
@SuppressWarnings("unchecked")
- Supplier<TypedParamValue> supplier = randomFrom(
- () -> new TypedParamValue("boolean", randomBoolean(), hasExplicitType),
- () -> new TypedParamValue("integer", randomInt(), hasExplicitType),
- () -> new TypedParamValue("long", randomLong(), hasExplicitType),
- () -> new TypedParamValue("double", randomDouble(), hasExplicitType),
- () -> new TypedParamValue("null", null, hasExplicitType),
- () -> new TypedParamValue("keyword", randomAlphaOfLength(10), hasExplicitType)
+ Supplier<QueryParam> supplier = randomFrom(
+ () -> new QueryParam(null, randomBoolean(), DataType.BOOLEAN),
+ () -> new QueryParam(null, randomInt(), DataType.INTEGER),
+ () -> new QueryParam(null, randomLong(), DataType.LONG),
+ () -> new QueryParam(null, randomDouble(), DataType.DOUBLE),
+ () -> new QueryParam(null, null, DataType.NULL),
+ () -> new QueryParam(null, randomAlphaOfLength(10), DataType.KEYWORD)
);
arr.add(supplier.get());
}
@@ -401,33 +499,22 @@ public class EsqlQueryRequestTests extends ESTestCase {
}
}
- private StringBuilder paramsString(List<TypedParamValue> params, boolean hasParams) {
+ private StringBuilder paramsString(List<QueryParam> params, boolean hasParams) {
StringBuilder paramsString = new StringBuilder();
if (hasParams) {
paramsString.append(",\"params\":[");
boolean first = true;
- for (TypedParamValue param : params) {
+ for (QueryParam param : params) {
if (first == false) {
paramsString.append(", ");
}
first = false;
- if (param.hasExplicitType()) {
- paramsString.append("{\"type\":\"");
- paramsString.append(param.type);
- paramsString.append("\",\"value\":");
- }
- switch (param.type) {
- case "keyword" -> {
- paramsString.append("\"");
- paramsString.append(param.value);
- paramsString.append("\"");
- }
- case "integer", "long", "boolean", "null", "double" -> {
- paramsString.append(param.value);
- }
- }
- if (param.hasExplicitType()) {
- paramsString.append("}");
+ if (param.type() == DataType.KEYWORD) {
+ paramsString.append("\"");
+ paramsString.append(param.value());
+ paramsString.append("\"");
+ } else if (param.type().isNumeric() || param.type() == DataType.BOOLEAN || param.type() == DataType.NULL) {
+ paramsString.append(param.value());
}
}
paramsString.append("]}");
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
index 07a364bf9196..e5f59f1ffa8a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java
@@ -11,7 +11,8 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.esql.VerificationException;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.parser.EsqlParser;
-import org.elasticsearch.xpack.esql.parser.TypedParamValue;
+import org.elasticsearch.xpack.esql.parser.QueryParam;
+import org.elasticsearch.xpack.esql.parser.QueryParams;
import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
import java.util.ArrayList;
@@ -19,6 +20,8 @@ import java.util.List;
import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning;
import static org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils.loadMapping;
+import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
+import static org.elasticsearch.xpack.esql.core.type.DataType.NULL;
import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -554,21 +557,21 @@ public class VerifierTests extends ESTestCase {
}
private String error(String query, Analyzer analyzer, Object... params) {
- List<TypedParamValue> parameters = new ArrayList<>();
+ List<QueryParam> parameters = new ArrayList<>();
for (Object param : params) {
if (param == null) {
- parameters.add(new TypedParamValue("null", null));
+ parameters.add(new QueryParam(null, null, NULL));
} else if (param instanceof String) {
- parameters.add(new TypedParamValue("keyword", param));
+ parameters.add(new QueryParam(null, param, KEYWORD));
} else if (param instanceof Number) {
- parameters.add(new TypedParamValue("param", param));
+ parameters.add(new QueryParam(null, param, EsqlDataTypes.fromJava(param)));
} else {
throw new IllegalArgumentException("VerifierTests don't support params of type " + param.getClass());
}
}
VerificationException e = expectThrows(
VerificationException.class,
- () -> analyzer.analyze(parser.createStatement(query, parameters))
+ () -> analyzer.analyze(parser.createStatement(query, new QueryParams(parameters)))
);
String message = e.getMessage();
assertTrue(message.startsWith("Found "));
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
index 884b24fc0fc5..28662c2470f1 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java
@@ -7,7 +7,6 @@
package org.elasticsearch.xpack.esql.parser;
-import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Build;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.IndexMode;
@@ -31,7 +30,6 @@ import org.elasticsearch.xpack.esql.core.plan.logical.Limit;
import org.elasticsearch.xpack.esql.core.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.esql.core.plan.logical.OrderBy;
import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.core.util.StringUtils;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
@@ -52,11 +50,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Lookup;
import org.elasticsearch.xpack.esql.plan.logical.MvExpand;
import org.elasticsearch.xpack.esql.plan.logical.Project;
import org.elasticsearch.xpack.esql.plan.logical.Row;
-import org.elasticsearch.xpack.versionfield.Version;
import java.math.BigInteger;
-import java.time.Duration;
-import java.time.Period;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -68,6 +63,7 @@ import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE;
import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE;
import static org.elasticsearch.xpack.esql.core.expression.function.FunctionResolutionStrategy.DEFAULT;
import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY;
+import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER;
import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned;
import static org.elasticsearch.xpack.esql.parser.ExpressionBuilder.breakIntoFragments;
@@ -79,6 +75,7 @@ import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
+//@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug")
public class StatementParserTests extends ESTestCase {
private static String FROM = "from test";
@@ -409,7 +406,7 @@ public class StatementParserTests extends ESTestCase {
}
public void testLimitConstraints() {
- expectError("from text | limit -1", "extraneous input '-' expecting INTEGER_LITERAL");
+ expectError("from text | limit -1", "line 1:19: extraneous input '-' expecting INTEGER_LITERAL");
}
public void testBasicSortCommand() {
@@ -770,19 +767,23 @@ public class StatementParserTests extends ESTestCase {
public void testInputParams() {
LogicalPlan stm = statement(
- "row x = ?, y = ?, a = ?, b = ?, c = ?, d = ?",
- List.of(
- new TypedParamValue("integer", 1),
- new TypedParamValue("keyword", "2"),
- new TypedParamValue("date_period", "2 days"),
- new TypedParamValue("time_duration", "4 hours"),
- new TypedParamValue("version", "1.2.3"),
- new TypedParamValue("ip", "127.0.0.1")
+ "row x = ?, y = ?, a = ?, b = ?, c = ?, d = ?, e = ?-1, f = ?+1",
+ new QueryParams(
+ List.of(
+ new QueryParam(null, 1, INTEGER),
+ new QueryParam(null, "2", KEYWORD),
+ new QueryParam(null, "2 days", KEYWORD),
+ new QueryParam(null, "4 hours", KEYWORD),
+ new QueryParam(null, "1.2.3", KEYWORD),
+ new QueryParam(null, "127.0.0.1", KEYWORD),
+ new QueryParam(null, 10, INTEGER),
+ new QueryParam(null, 10, INTEGER)
+ )
)
);
assertThat(stm, instanceOf(Row.class));
Row row = (Row) stm;
- assertThat(row.fields().size(), is(6));
+ assertThat(row.fields().size(), is(8));
NamedExpression field = row.fields().get(0);
assertThat(field.name(), is("x"));
@@ -800,65 +801,346 @@ public class StatementParserTests extends ESTestCase {
assertThat(field.name(), is("a"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
- assertThat(alias.child().fold(), is(Period.ofDays(2)));
+ assertThat(alias.child().fold(), is("2 days"));
field = row.fields().get(3);
assertThat(field.name(), is("b"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
- assertThat(alias.child().fold(), is(Duration.ofHours(4)));
+ assertThat(alias.child().fold(), is("4 hours"));
field = row.fields().get(4);
assertThat(field.name(), is("c"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
- assertThat(alias.child().fold().getClass(), is(BytesRef.class));
- assertThat(alias.child().fold().toString(), is(new Version("1.2.3").toBytesRef().toString()));
+ assertThat(alias.child().fold().getClass(), is(String.class));
+ assertThat(alias.child().fold().toString(), is("1.2.3"));
field = row.fields().get(5);
assertThat(field.name(), is("d"));
assertThat(field, instanceOf(Alias.class));
alias = (Alias) field;
- assertThat(alias.child().fold().getClass(), is(BytesRef.class));
- assertThat(alias.child().fold().toString(), is(StringUtils.parseIP("127.0.0.1").toString()));
- }
+ assertThat(alias.child().fold().getClass(), is(String.class));
+ assertThat(alias.child().fold().toString(), is("127.0.0.1"));
- public void testWrongIntervalParams() {
- expectError("row x = ?", List.of(new TypedParamValue("date_period", "12")), "Cannot parse [12] to DATE_PERIOD");
- expectError("row x = ?", List.of(new TypedParamValue("time_duration", "12")), "Cannot parse [12] to TIME_DURATION");
- expectError(
- "row x = ?",
- List.of(new TypedParamValue("date_period", "12 months foo")),
- "Cannot parse [12 months foo] to DATE_PERIOD"
- );
- expectError(
- "row x = ?",
- List.of(new TypedParamValue("time_duration", "12 minutes bar")),
- "Cannot parse [12 minutes bar] to TIME_DURATION"
- );
- expectError("row x = ?", List.of(new TypedParamValue("date_period", "12 foo")), "Unexpected time interval qualifier: 'foo'");
- expectError("row x = ?", List.of(new TypedParamValue("time_duration", "12 bar")), "Unexpected time interval qualifier: 'bar'");
- expectError("row x = ?", List.of(new TypedParamValue("date_period", "foo days")), "Cannot parse [foo days] to DATE_PERIOD");
- expectError(
- "row x = ?",
- List.of(new TypedParamValue("time_duration", "bar seconds")),
- "Cannot parse [bar seconds] to TIME_DURATION"
- );
+ field = row.fields().get(6);
+ assertThat(field.name(), is("e"));
+ assertThat(field, instanceOf(Alias.class));
+ alias = (Alias) field;
+ assertThat(alias.child().fold(), is(9));
- expectError(
- "row x = ?",
- List.of(new TypedParamValue("date_period", "2 minutes")),
- "Cannot parse [2 minutes] to DATE_PERIOD, did you mean TIME_DURATION?"
- );
- expectError(
- "row x = ?",
- List.of(new TypedParamValue("time_duration", "11 months")),
- "Cannot parse [11 months] to TIME_DURATION, did you mean DATE_PERIOD?"
- );
+ field = row.fields().get(7);
+ assertThat(field.name(), is("f"));
+ assertThat(field, instanceOf(Alias.class));
+ alias = (Alias) field;
+ assertThat(alias.child().fold(), is(11));
}
public void testMissingInputParams() {
- expectError("row x = ?, y = ?", List.of(new TypedParamValue("integer", 1)), "Not enough actual parameters 1");
+ expectError("row x = ?, y = ?", List.of(new QueryParam(null, 1, INTEGER)), "Not enough actual parameters 1");
+ }
+
+ public void testNamedParams() {
+ LogicalPlan stm = statement("row x=?name1, y = ?name1", new QueryParams(List.of(new QueryParam("name1", 1, INTEGER))));
+ assertThat(stm, instanceOf(Row.class));
+ Row row = (Row) stm;
+ assertThat(row.fields().size(), is(2));
+
+ NamedExpression field = row.fields().get(0);
+ assertThat(field.name(), is("x"));
+ assertThat(field, instanceOf(Alias.class));
+ Alias alias = (Alias) field;
+ assertThat(alias.child().fold(), is(1));
+
+ field = row.fields().get(1);
+ assertThat(field.name(), is("y"));
+ assertThat(field, instanceOf(Alias.class));
+ alias = (Alias) field;
+ assertThat(alias.child().fold(), is(1));
+ }
+
+ public void testInvalidNamedParams() {
+ expectError(
+ "from test | where x < ?n1 | eval y = ?n2",
+ List.of(new QueryParam("n1", 5, INTEGER)),
+ "Unknown query parameter [n2], did you mean [n1]?"
+ );
+
+ expectError(
+ "from test | where x < ?n1 | eval y = ?n2",
+ List.of(new QueryParam("n1", 5, INTEGER), new QueryParam("n3", 5, INTEGER)),
+ "Unknown query parameter [n2], did you mean any of [n1, n3]?"
+ );
+
+ expectError("from test | where x < ?_1", List.of(new QueryParam("_1", 5, INTEGER)), "extraneous input '_1' expecting ");
+
+ expectError("from test | where x < ?#1", List.of(new QueryParam("#1", 5, INTEGER)), "token recognition error at: '#'");
+
+ expectError(
+ "from test | where x < ??",
+ List.of(new QueryParam("n_1", 5, INTEGER), new QueryParam("n_2", 5, INTEGER)),
+ "extraneous input '?' expecting "
+ );
+ }
+
+ public void testPositionalParams() {
+ LogicalPlan stm = statement("row x=?1, y=?1", new QueryParams(List.of(new QueryParam(null, 1, INTEGER))));
+ assertThat(stm, instanceOf(Row.class));
+ Row row = (Row) stm;
+ assertThat(row.fields().size(), is(2));
+
+ NamedExpression field = row.fields().get(0);
+ assertThat(field.name(), is("x"));
+ assertThat(field, instanceOf(Alias.class));
+ Alias alias = (Alias) field;
+ assertThat(alias.child().fold(), is(1));
+
+ field = row.fields().get(1);
+ assertThat(field.name(), is("y"));
+ assertThat(field, instanceOf(Alias.class));
+ alias = (Alias) field;
+ assertThat(alias.child().fold(), is(1));
+ }
+
+ public void testInvalidPositionalParams() {
+ expectError(
+ "from test | where x < ?0",
+ List.of(new QueryParam(null, 5, INTEGER)),
+ "No parameter is defined for position 0, did you mean position 1"
+ );
+
+ expectError(
+ "from test | where x < ?2",
+ List.of(new QueryParam(null, 5, INTEGER)),
+ "No parameter is defined for position 2, did you mean position 1"
+ );
+
+ expectError(
+ "from test | where x < ?0 and y < ?2",
+ List.of(new QueryParam(null, 5, INTEGER)),
+ "line 1:24: No parameter is defined for position 0, did you mean position 1?; "
+ + "line 1:35: No parameter is defined for position 2, did you mean position 1?"
+ );
+
+ expectError(
+ "from test | where x < ?0 and y < ?2",
+ List.of(new QueryParam(null, 5, INTEGER)),
+ "No parameter is defined for position 2, did you mean position 1"
+ );
+
+ expectError(
+ "from test | where x < ?0",
+ List.of(new QueryParam(null, 5, INTEGER), new QueryParam(null, 10, INTEGER)),
+ "No parameter is defined for position 0, did you mean any position between 1 and 2?"
+ );
+ }
+
+ public void testParamInWhere() {
+ LogicalPlan plan = statement("from test | where x < ? | limit 10", new QueryParams(List.of(new QueryParam(null, 5, INTEGER))));
+ assertThat(plan, instanceOf(Limit.class));
+ Limit limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Filter.class));
+ Filter w = (Filter) limit.children().get(0);
+ assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
+ assertThat(limit.children().get(0).children().size(), equalTo(1));
+ assertThat(limit.children().get(0).children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement("from test | where x < ?n1 | limit 10", new QueryParams(List.of(new QueryParam("n1", 5, INTEGER))));
+ assertThat(plan, instanceOf(Limit.class));
+ limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Filter.class));
+ w = (Filter) limit.children().get(0);
+ assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
+ assertThat(limit.children().get(0).children().size(), equalTo(1));
+ assertThat(limit.children().get(0).children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement("from test | where x < ?1 | limit 10", new QueryParams(List.of(new QueryParam(null, 5, INTEGER))));
+ assertThat(plan, instanceOf(Limit.class));
+ limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Filter.class));
+ w = (Filter) limit.children().get(0);
+ assertThat(((Literal) w.condition().children().get(1)).value(), equalTo(5));
+ assertThat(limit.children().get(0).children().size(), equalTo(1));
+ assertThat(limit.children().get(0).children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+ }
+
+ public void testParamInEval() {
+ LogicalPlan plan = statement(
+ "from test | where x < ? | eval y = ? + ? | limit 10",
+ new QueryParams(
+ List.of(new QueryParam(null, 5, INTEGER), new QueryParam(null, -1, INTEGER), new QueryParam(null, 100, INTEGER))
+ )
+ );
+ assertThat(plan, instanceOf(Limit.class));
+ Limit limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Eval.class));
+ Eval eval = (Eval) limit.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
+ Filter f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement(
+ "from test | where x < ?n1 | eval y = ?n2 + ?n3 | limit 10",
+ new QueryParams(
+ List.of(new QueryParam("n1", 5, INTEGER), new QueryParam("n2", -1, INTEGER), new QueryParam("n3", 100, INTEGER))
+ )
+ );
+ assertThat(plan, instanceOf(Limit.class));
+ limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Eval.class));
+ eval = (Eval) limit.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
+ f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement(
+ "from test | where x < ?1 | eval y = ?2 + ?1 | limit 10",
+ new QueryParams(List.of(new QueryParam(null, 5, INTEGER), new QueryParam(null, -1, INTEGER)))
+ );
+ assertThat(plan, instanceOf(Limit.class));
+ limit = (Limit) plan;
+ assertThat(limit.limit(), instanceOf(Literal.class));
+ assertThat(((Literal) limit.limit()).value(), equalTo(10));
+ assertThat(limit.children().size(), equalTo(1));
+ assertThat(limit.children().get(0), instanceOf(Eval.class));
+ eval = (Eval) limit.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
+ f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+ }
+
+ public void testParamInAggFunction() {
+ LogicalPlan plan = statement(
+ "from test | where x < ? | eval y = ? + ? | stats count(?) by z",
+ new QueryParams(
+ List.of(
+ new QueryParam(null, 5, INTEGER),
+ new QueryParam(null, -1, INTEGER),
+ new QueryParam(null, 100, INTEGER),
+ new QueryParam(null, "*", KEYWORD)
+ )
+ )
+ );
+ assertThat(plan, instanceOf(EsqlAggregate.class));
+ EsqlAggregate agg = (EsqlAggregate) plan;
+ assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo("*"));
+ assertThat(agg.child(), instanceOf(Eval.class));
+ assertThat(agg.children().size(), equalTo(1));
+ assertThat(agg.children().get(0), instanceOf(Eval.class));
+ Eval eval = (Eval) agg.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
+ Filter f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement(
+ "from test | where x < ?n1 | eval y = ?n2 + ?n3 | stats count(?n4) by z",
+ new QueryParams(
+ List.of(
+ new QueryParam("n1", 5, INTEGER),
+ new QueryParam("n2", -1, INTEGER),
+ new QueryParam("n3", 100, INTEGER),
+ new QueryParam("n4", "*", KEYWORD)
+ )
+ )
+ );
+ assertThat(plan, instanceOf(EsqlAggregate.class));
+ agg = (EsqlAggregate) plan;
+ assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo("*"));
+ assertThat(agg.child(), instanceOf(Eval.class));
+ assertThat(agg.children().size(), equalTo(1));
+ assertThat(agg.children().get(0), instanceOf(Eval.class));
+ eval = (Eval) agg.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(100));
+ f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+
+ plan = statement(
+ "from test | where x < ?1 | eval y = ?2 + ?1 | stats count(?3) by z",
+ new QueryParams(
+ List.of(new QueryParam(null, 5, INTEGER), new QueryParam(null, -1, INTEGER), new QueryParam(null, "*", KEYWORD))
+ )
+ );
+ assertThat(plan, instanceOf(EsqlAggregate.class));
+ agg = (EsqlAggregate) plan;
+ assertThat(((Literal) agg.aggregates().get(0).children().get(0).children().get(0)).value(), equalTo("*"));
+ assertThat(agg.child(), instanceOf(Eval.class));
+ assertThat(agg.children().size(), equalTo(1));
+ assertThat(agg.children().get(0), instanceOf(Eval.class));
+ eval = (Eval) agg.children().get(0);
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).left()).value(), equalTo(-1));
+ assertThat(((Literal) ((Add) eval.fields().get(0).child()).right()).value(), equalTo(5));
+ f = (Filter) eval.children().get(0);
+ assertThat(((Literal) f.condition().children().get(1)).value(), equalTo(5));
+ assertThat(f.children().size(), equalTo(1));
+ assertThat(f.children().get(0), instanceOf(EsqlUnresolvedRelation.class));
+ }
+
+ public void testParamMixed() {
+ expectError(
+ "from test | where x < ? | eval y = ?n2 + ?n3 | limit ?n4",
+ List.of(
+ new QueryParam("n1", 5, INTEGER),
+ new QueryParam("n2", -1, INTEGER),
+ new QueryParam("n3", 100, INTEGER),
+ new QueryParam("n4", 10, INTEGER)
+ ),
+ "Inconsistent parameter declaration, "
+ + "use one of positional, named or anonymous params but not a combination of named and anonymous"
+ );
+
+ expectError(
+ "from test | where x < ?1 | eval y = ?n2 + ?n3 | limit ?n4",
+ List.of(
+ new QueryParam("n1", 5, INTEGER),
+ new QueryParam("n2", -1, INTEGER),
+ new QueryParam("n3", 100, INTEGER),
+ new QueryParam("n4", 10, INTEGER)
+ ),
+ "Inconsistent parameter declaration, "
+ + "use one of positional, named or anonymous params but not a combination of named and positional"
+ );
+
+ expectError(
+ "from test | where x < ? | eval y = ?2 + ?n3 | limit ?n4",
+ List.of(
+ new QueryParam("n1", 5, INTEGER),
+ new QueryParam("n2", -1, INTEGER),
+ new QueryParam("n3", 100, INTEGER),
+ new QueryParam("n4", 10, INTEGER)
+ ),
+ "Inconsistent parameter declaration, "
+ + "use one of positional, named or anonymous params but not a combination of positional and anonymous"
+ );
}
public void testFieldContainingDotsAndNumbers() {
@@ -1117,10 +1399,10 @@ public class StatementParserTests extends ESTestCase {
}
private LogicalPlan statement(String e) {
- return statement(e, List.of());
+ return statement(e, QueryParams.EMPTY);
}
- private LogicalPlan statement(String e, List<TypedParamValue> params) {
+ private LogicalPlan statement(String e, QueryParams params) {
return parser.createStatement(e, params);
}
@@ -1200,8 +1482,12 @@ public class StatementParserTests extends ESTestCase {
assertThat(e.getMessage(), containsString(errorMessage));
}
- private void expectError(String query, List<TypedParamValue> params, String errorMessage) {
- ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query, params));
+ private void expectError(String query, List<QueryParam> params, String errorMessage) {
+ ParsingException e = expectThrows(
+ ParsingException.class,
+ "Expected syntax error for " + query,
+ () -> statement(query, new QueryParams(params))
+ );
assertThat(e.getMessage(), containsString(errorMessage));
}
}
diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
index ab0261d91663..1c95e961d053 100644
--- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
+++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/10_basic.yml
@@ -309,12 +309,12 @@ setup:
- match: {values.0: [1, 44, "green"]}
---
-"Test Mixed Input Params":
+"Test Unnamed Input Params":
- do:
esql.query:
body:
query: 'from test | eval x = ?, y = ?, z = ?, t = ?, u = ?, v = ? | keep x, y, z, t, u, v | limit 3'
- params: [{"value": 1, "type": "keyword"}, {"value": 2, "type": "double"}, null, true, 123, {"value": 123, "type": "long"}]
+ params: ["1", 2.0, null, true, 123, 1674835275193]
- length: {columns: 6}
- match: {columns.0.name: "x"}
@@ -330,9 +330,44 @@ setup:
- match: {columns.5.name: "v"}
- match: {columns.5.type: "long"}
- length: {values: 3}
- - match: {values.0: ["1",2.0,null,true,123,123]}
- - match: {values.1: ["1",2.0,null,true,123,123]}
- - match: {values.2: ["1",2.0,null,true,123,123]}
+ - match: {values.0: ["1",2.0,null,true,123,1674835275193]}
+ - match: {values.1: ["1",2.0,null,true,123,1674835275193]}
+ - match: {values.2: ["1",2.0,null,true,123,1674835275193]}
+
+---
+"Test Named Input Params":
+ - requires:
+ test_runner_features: [ capabilities ]
+ capabilities:
+ - method: POST
+ path: /_query
+ parameters: [ ]
+ capabilities: [ named_positional_parameter ]
+ reason: "named or positional parameters"
+
+ - do:
+ esql.query:
+ body:
+ query: 'from test | eval x = ?, y = ?, z = ?, t = ?, u = ?, v = ? | keep x, y, z, t, u, v | limit 3'
+ params: [{"n1" : "1"}, {"n2" : 2.0}, {"n3" : null}, {"n4" : true}, {"n5" : 123}, {"n6": 1674835275193}]
+
+ - length: {columns: 6}
+ - match: {columns.0.name: "x"}
+ - match: {columns.0.type: "keyword"}
+ - match: {columns.1.name: "y"}
+ - match: {columns.1.type: "double"}
+ - match: {columns.2.name: "z"}
+ - match: {columns.2.type: "null"}
+ - match: {columns.3.name: "t"}
+ - match: {columns.3.type: "boolean"}
+ - match: {columns.4.name: "u"}
+ - match: {columns.4.type: "integer"}
+ - match: {columns.5.name: "v"}
+ - match: {columns.5.type: "long"}
+ - length: {values: 3}
+ - match: {values.0: ["1",2.0,null,true,123,1674835275193]}
+ - match: {values.1: ["1",2.0,null,true,123,1674835275193]}
+ - match: {values.2: ["1",2.0,null,true,123,1674835275193]}
---
version is not allowed:
From 394d2b09a675603ff4255e9bf114eccec75db975 Mon Sep 17 00:00:00 2001
From: Liam Thompson <32779855+leemthompo@users.noreply.github.com>
Date: Tue, 11 Jun 2024 17:04:37 +0200
Subject: [PATCH 13/34] Revert "[DOCS] Remove ESQL demo env link from 8.14+
(#109562)" (#109579)
This reverts commit 0480c1acba41cb0cd8fb22c384adfff0790a2f85.
---
docs/reference/esql/esql-get-started.asciidoc | 8 ++++----
.../esql/esql-getting-started-sample-data.asciidoc | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc
index 207794c064da..b7928898a3bb 100644
--- a/docs/reference/esql/esql-get-started.asciidoc
+++ b/docs/reference/esql/esql-get-started.asciidoc
@@ -15,9 +15,10 @@ This getting started is also available as an https://github.com/elastic/elastics
[[esql-getting-started-prerequisites]]
=== Prerequisites
-To follow along with the queries in this guide, you'll need an {es} deployment with our sample data.
+To follow along with the queries in this guide, you can either set up your own
+deployment, or use Elastic's public {esql} demo environment.
-include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-sample-data.asciidoc[tag=own-deployment]
+include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-sample-data.asciidoc[]
[discrete]
[[esql-getting-started-running-queries]]
@@ -268,8 +269,7 @@ Before you can use `ENRICH`, you first need to
<> and <>
an <>.
-include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-enrich-policy.asciidoc[tag=own-deployment]
-
+include::{es-ref-dir}/tab-widgets/esql/esql-getting-started-widget-enrich-policy.asciidoc[]
After creating and executing a policy, you can use it with the `ENRICH`
command:
diff --git a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc
index 97f4859e012a..d9b08b7281f7 100644
--- a/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc
+++ b/docs/reference/tab-widgets/esql/esql-getting-started-sample-data.asciidoc
@@ -1,6 +1,6 @@
// tag::own-deployment[]
-First, you'll need to ingest the sample data. In {kib}, open the main menu and select *Dev
+First ingest some sample data. In {kib}, open the main menu and select *Dev
Tools*. Run the following two requests:
[source,console]
From 2a193b53d84b65395003e5be9cb5502c764be8e4 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 11 Jun 2024 17:23:25 +0100
Subject: [PATCH 14/34] Remove unused `BaseNodesRequest` broadcasts (#109512)
Several `TransportNodesAction` implementations send the top-level
request out to every node even though the nodes themselves don't use it.
This commit adjusts the transport protocol to drop the unnecessary data
on the wire.
Relates #100878
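
For illustration (condensed from the `TransportClusterStatsAction` change in the diff below, not an
addition to the patch): the node-level request no longer wraps the top-level request, and only reads
or writes a throwaway `ClusterStatsRequest` when the remote node is on a transport version older
than `DROP_UNUSED_NODES_REQUESTS`:

    public static class ClusterStatsNodeRequest extends TransportRequest {

        ClusterStatsNodeRequest() {}

        public ClusterStatsNodeRequest(StreamInput in) throws IOException {
            super(in);
            if (in.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_REQUESTS)) {
                new ClusterStatsRequest(in); // read and discard the legacy top-level request
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            if (out.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_REQUESTS)) {
                new ClusterStatsRequest().writeTo(out); // older nodes still expect a payload here
            }
        }
    }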
---
.../org/elasticsearch/TransportVersions.java | 1 +
.../cluster/node/info/NodesInfoRequest.java | 3 +++
.../cluster/node/stats/NodesStatsRequest.java | 4 +++-
.../cluster/stats/ClusterStatsRequest.java | 3 +++
.../stats/TransportClusterStatsAction.java | 20 ++++++++++---------
.../node/tasks/CancellableTasksTests.java | 9 ++-------
.../node/tasks/TaskManagerTestCase.java | 1 -
.../cluster/node/tasks/TestTaskPlugin.java | 10 +---------
.../node/tasks/TransportTasksActionTests.java | 7 +------
.../nodes/TransportNodesActionTests.java | 8 --------
.../action/GetInferenceDiagnosticsAction.java | 4 ----
.../action/TrainedModelCacheInfoAction.java | 3 +++
.../NodesDeprecationCheckAction.java | 18 +++++++++--------
.../NodesDeprecationCheckRequest.java | 4 ++++
.../TransportNodeDeprecationCheckAction.java | 2 +-
.../TransportTrainedModelCacheInfoAction.java | 19 ++++++++++--------
16 files changed, 54 insertions(+), 62 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java
index c2be2da12534..e85209c19d82 100644
--- a/server/src/main/java/org/elasticsearch/TransportVersions.java
+++ b/server/src/main/java/org/elasticsearch/TransportVersions.java
@@ -188,6 +188,7 @@ public class TransportVersions {
public static final TransportVersion RANK_DOC_IN_SHARD_FETCH_REQUEST = def(8_679_00_0);
public static final TransportVersion SECURITY_SETTINGS_REQUEST_TIMEOUTS = def(8_680_00_0);
public static final TransportVersion QUERY_RULE_CRUD_API_PUT = def(8_681_00_0);
+ public static final TransportVersion DROP_UNUSED_NODES_REQUESTS = def(8_682_00_0);
/*
* STOP! READ THIS FIRST! No, really,
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
index ebf01feaaa89..51699c1f7dcd 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java
@@ -11,6 +11,7 @@ package org.elasticsearch.action.admin.cluster.node.info;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.UpdateForV9;
import java.io.IOException;
import java.util.Set;
@@ -30,6 +31,7 @@ public final class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
* @param in A stream input object.
* @throws IOException if the stream cannot be deserialized.
*/
+ @UpdateForV9 // this constructor is unused in v9
public NodesInfoRequest(StreamInput in) throws IOException {
super(in);
nodesInfoMetrics = new NodesInfoMetrics(in);
@@ -111,6 +113,7 @@ public final class NodesInfoRequest extends BaseNodesRequest<NodesInfoRequest> {
return this;
}
+ @UpdateForV9 // this method can just call localOnly() in v9
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java
index c441c6daf89b..ff88bc5fcf46 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java
@@ -13,6 +13,7 @@ import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
@@ -36,9 +37,9 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
nodesStatsRequestParameters = new NodesStatsRequestParameters();
}
+ @UpdateForV9 // this constructor is unused in v9
public NodesStatsRequest(StreamInput in) throws IOException {
super(in);
-
nodesStatsRequestParameters = new NodesStatsRequestParameters(in);
}
@@ -178,6 +179,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
nodesStatsRequestParameters.setIncludeShardsStats(includeShardsStats);
}
+ @UpdateForV9 // this method can just call localOnly() in v9
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
index ca2ec4e5607e..bba669e07a70 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java
@@ -11,6 +11,7 @@ package org.elasticsearch.action.admin.cluster.stats;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
@@ -23,6 +24,7 @@ import java.util.Map;
*/
public class ClusterStatsRequest extends BaseNodesRequest<ClusterStatsRequest> {
+ @UpdateForV9 // this constructor is unused in v9
public ClusterStatsRequest(StreamInput in) throws IOException {
super(in);
}
@@ -40,6 +42,7 @@ public class ClusterStatsRequest extends BaseNodesRequest<ClusterStatsRequest> {
return new CancellableTask(id, type, action, "", parentTaskId, headers);
}
+ @UpdateForV9 // this method can just call localOnly() in v9
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
index f1b6faaca439..2a8fecde7ee9 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java
@@ -9,6 +9,7 @@
package org.elasticsearch.action.admin.cluster.stats;
import org.apache.lucene.store.AlreadyClosedException;
+import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.FailedNodeException;
@@ -32,6 +33,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.CancellableSingleObjectCache;
import org.elasticsearch.common.util.concurrent.ListenableFuture;
import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.core.UpdateForV9;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.CommitStats;
import org.elasticsearch.index.seqno.RetentionLeaseStats;
@@ -167,7 +169,7 @@ public class TransportClusterStatsAction extends TransportNodesAction<
@Override
protected ClusterStatsNodeRequest newNodeRequest(ClusterStatsRequest request) {
- return new ClusterStatsNodeRequest(request);
+ return new ClusterStatsNodeRequest();
}
@Override
@@ -251,18 +253,16 @@ public class TransportClusterStatsAction extends TransportNodesAction<
);
}
+ @UpdateForV9 // this can be replaced with TransportRequest.Empty in v9
public static class ClusterStatsNodeRequest extends TransportRequest {
- // TODO don't wrap the whole top-level request, it contains heavy and irrelevant DiscoveryNode things; see #100878
- ClusterStatsRequest request;
+ ClusterStatsNodeRequest() {}
public ClusterStatsNodeRequest(StreamInput in) throws IOException {
super(in);
- request = new ClusterStatsRequest(in);
- }
-
- ClusterStatsNodeRequest(ClusterStatsRequest request) {
- this.request = request;
+ if (in.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_REQUESTS)) {
+ new ClusterStatsRequest(in);
+ }
}
@Override
@@ -273,7 +273,9 @@ public class TransportClusterStatsAction extends TransportNodesAction<
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
- request.writeTo(out);
+ if (out.getTransportVersion().before(TransportVersions.DROP_UNUSED_NODES_REQUESTS)) {
+ new ClusterStatsRequest().writeTo(out);
+ }
}
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
index 22953f9959c1..9883eec6896c 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java
@@ -100,12 +100,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
}
public static class CancellableNodesRequest extends BaseNodesRequest<CancellableNodesRequest> {
- private String requestName;
-
- private CancellableNodesRequest(StreamInput in) throws IOException {
- super(in);
- requestName = in.readString();
- }
+ private final String requestName;
public CancellableNodesRequest(String requestName, String... nodesIds) {
super(nodesIds);
@@ -147,7 +142,7 @@ public class CancellableTasksTests extends TaskManagerTestCase {
boolean shouldBlock,
CountDownLatch actionStartedLatch
) {
- super(actionName, threadPool, clusterService, transportService, CancellableNodesRequest::new, CancellableNodeRequest::new);
+ super(actionName, threadPool, clusterService, transportService, CancellableNodeRequest::new);
this.shouldBlock = shouldBlock;
this.actionStartedLatch = actionStartedLatch;
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
index f943ff14002c..f3450bbff669 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java
@@ -144,7 +144,6 @@ public abstract class TaskManagerTestCase extends ESTestCase {
ThreadPool threadPool,
ClusterService clusterService,
TransportService transportService,
- Writeable.Reader<NodesRequest> request,
Writeable.Reader<NodeRequest> nodeRequest
) {
super(
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
index 63629e16974d..6f345eb7dcda 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java
@@ -195,19 +195,11 @@ public class TestTaskPlugin extends Plugin implements ActionPlugin, NetworkPlugi
}
public static class NodesRequest extends BaseNodesRequest<NodesRequest> {
- private String requestName;
+ private final String requestName;
private boolean shouldStoreResult = false;
private boolean shouldBlock = true;
private boolean shouldFail = false;
- NodesRequest(StreamInput in) throws IOException {
- super(in);
- requestName = in.readString();
- shouldStoreResult = in.readBoolean();
- shouldBlock = in.readBoolean();
- shouldFail = in.readBoolean();
- }
-
NodesRequest(String requestName, String... nodesIds) {
super(nodesIds);
this.requestName = requestName;
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
index 6f4da1fe1ebe..969ed50685bc 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java
@@ -109,11 +109,6 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
public static class NodesRequest extends BaseNodesRequest<NodesRequest> {
private final String requestName;
- NodesRequest(StreamInput in) throws IOException {
- super(in);
- requestName = in.readString();
- }
-
public NodesRequest(String requestName, String... nodesIds) {
super(nodesIds);
this.requestName = requestName;
@@ -142,7 +137,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase {
abstract class TestNodesAction extends AbstractTestNodesAction<NodesRequest, NodeRequest> {
TestNodesAction(String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) {
- super(actionName, threadPool, clusterService, transportService, NodesRequest::new, NodeRequest::new);
+ super(actionName, threadPool, clusterService, transportService, NodeRequest::new);
}
@Override
diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
index 689040f9b6c5..d0535665d368 100644
--- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java
@@ -323,11 +323,9 @@ public class TransportNodesActionTests extends ESTestCase {
public DataNodesOnlyTransportNodesAction getDataNodesOnlyTransportNodesAction(TransportService transportService) {
return new DataNodesOnlyTransportNodesAction(
- THREAD_POOL,
clusterService,
transportService,
new ActionFilters(Collections.emptySet()),
- TestNodesRequest::new,
TestNodeRequest::new,
THREAD_POOL.executor(ThreadPool.Names.GENERIC)
);
@@ -383,11 +381,9 @@ public class TransportNodesActionTests extends ESTestCase {
private static class DataNodesOnlyTransportNodesAction extends TestTransportNodesAction {
DataNodesOnlyTransportNodesAction(
- ThreadPool threadPool,
ClusterService clusterService,
TransportService transportService,
ActionFilters actionFilters,
- Writeable.Reader<TestNodesRequest> request,
Writeable.Reader<TestNodeRequest> nodeRequest,
Executor nodeExecutor
) {
@@ -401,10 +397,6 @@ public class TransportNodesActionTests extends ESTestCase {
}
private static class TestNodesRequest extends BaseNodesRequest<TestNodesRequest> {
- TestNodesRequest(StreamInput in) throws IOException {
- super(in);
- }
-
TestNodesRequest(String... nodesIds) {
super(nodesIds);
}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java
index 29edc88ecda7..00dcd5642401 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/GetInferenceDiagnosticsAction.java
@@ -43,10 +43,6 @@ public class GetInferenceDiagnosticsAction extends ActionType<GetInferenceDiagnosticsAction.Response> {
+
+ @UpdateForV9 // this constructor is unused in v9
public NodesDeprecationCheckRequest(StreamInput in) throws IOException {
super(in);
}
@@ -24,6 +27,7 @@ public class NodesDeprecationCheckRequest extends BaseNodesRequest<NodesDeprecationCheckRequest> {
Date: Tue, 11 Jun 2024 17:34:16 +0100
Subject: [PATCH 15/34] Deprecate `BaseNodesRequest` reader ctor (#109513)
Relates #100878
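
The Javadoc added below spells out the recommended replacement: keep the heavyweight top-level
request on the coordinating node and send each node only a dedicated request carrying the fields it
needs. A minimal sketch of that pattern (hypothetical `ExampleNodesRequest`/`ExampleNodeRequest`
names, not taken from this patch):

    import org.elasticsearch.action.support.nodes.BaseNodesRequest;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;
    import org.elasticsearch.transport.TransportRequest;

    import java.io.IOException;

    // Top-level request: constructed and resolved on the coordinating node, never sent as-is.
    class ExampleNodesRequest extends BaseNodesRequest<ExampleNodesRequest> {
        private final String requestName;

        ExampleNodesRequest(String requestName, String... nodesIds) {
            super(nodesIds);
            this.requestName = requestName;
        }

        String requestName() {
            return requestName;
        }
    }

    // Node-level request: copies over only the fields the nodes actually use.
    class ExampleNodeRequest extends TransportRequest {
        private final String requestName;

        ExampleNodeRequest(ExampleNodesRequest request) {
            this.requestName = request.requestName();
        }

        ExampleNodeRequest(StreamInput in) throws IOException {
            super(in);
            this.requestName = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeString(requestName);
        }
    }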
---
.../action/support/nodes/BaseNodesRequest.java | 8 ++++++++
1 file changed, 8 insertions(+)
diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java b/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
index 8a2e7684cadf..626cdb8046f5 100644
--- a/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java
@@ -39,6 +39,14 @@ public abstract class BaseNodesRequest<Request extends BaseNodesRequest<Request>> extends ActionRequest {
private TimeValue timeout;
+ /**
+ * @deprecated {@link BaseNodesRequest} derivatives are quite heavyweight and should never need sending over the wire. Do not include
+ * the full top-level request directly in the node-level requests. Instead, copy the needed fields over to a dedicated node-level
+ * request.
+ *
+ * @see #100878
+ */
+ @Deprecated(forRemoval = true)
protected BaseNodesRequest(StreamInput in) throws IOException {
// A bare `BaseNodesRequest` is never sent over the wire, but several implementations send the full top-level request to each node
// (wrapped up in another request). They shouldn't, but until we fix that we must keep this. See #100878.
From dbf388cec745665df9e8b3a80914d452e786f079 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Tue, 11 Jun 2024 18:06:43 +0100
Subject: [PATCH 16/34] Fix trappy reroute timeout (#109587)
Relates #107984
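
In code terms (condensed from the call sites updated below; `TEST_REQUEST_TIMEOUT` is the test-suite
constant those call sites use), callers now have to state the master-node timeout and the ack timeout
explicitly when constructing the request instead of inheriting trappy defaults:

    // Before: implicit default master-node and ack timeouts.
    // new ClusterRerouteRequest().setRetryFailed(true);

    // After: both timeouts are explicit at construction time.
    client().execute(
        TransportClusterRerouteAction.TYPE,
        new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).setRetryFailed(true)
    );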
---
.../admin/indices/create/ShrinkIndexIT.java | 5 +++-
.../cluster/allocation/ClusterRerouteIT.java | 27 +++++++++++++------
.../cluster/routing/PrimaryAllocationIT.java | 2 +-
.../elasticsearch/recovery/RelocationIT.java | 2 +-
.../reroute/ClusterRerouteRequest.java | 5 ++--
.../reroute/ClusterRerouteRequestBuilder.java | 11 +++++++-
.../cluster/RestClusterRerouteAction.java | 4 +--
.../reroute/ClusterRerouteRequestTests.java | 15 ++++++-----
.../cluster/reroute/ClusterRerouteTests.java | 6 ++---
.../AbstractClientHeadersTestCase.java | 2 +-
.../metadata/AutoExpandReplicasTests.java | 2 +-
.../allocation/FailedNodeRoutingTests.java | 3 ++-
...ClusterStateServiceRandomUpdatesTests.java | 2 +-
.../snapshots/SnapshotResiliencyTests.java | 2 +-
.../cluster/reroute/ClusterRerouteUtils.java | 6 +++--
15 files changed, 60 insertions(+), 34 deletions(-)
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java
index 6b2c7f0a9580..3712ad8c35f6 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java
@@ -398,7 +398,10 @@ public class ShrinkIndexIT extends ESIntegTestCase {
refreshClusterInfo();
// kick off a retry and wait until it's done!
final var clusterRerouteResponse = safeGet(
- client().execute(TransportClusterRerouteAction.TYPE, new ClusterRerouteRequest().setRetryFailed(true))
+ client().execute(
+ TransportClusterRerouteAction.TYPE,
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).setRetryFailed(true)
+ )
);
long expectedShardSize = clusterRerouteResponse.getState().routingTable().index("target").shard(0).shard(0).getExpectedShardSize();
// we support the expected shard size in the allocator to sum up over the source index shards
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
index 5f75b62cabb6..7c13171ea76a 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java
@@ -104,7 +104,7 @@ public class ClusterRerouteIT extends ESIntegTestCase {
state = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean())
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
.add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
.dryRun(true)
)
@@ -123,7 +123,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
state = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean()).add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
)
).getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
@@ -151,7 +152,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
state = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean()).add(new MoveAllocationCommand("test", 0, node_1, node_2))
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
+ .add(new MoveAllocationCommand("test", 0, node_1, node_2))
)
).getState();
@@ -259,7 +261,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
state = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean()).add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
)
).getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
@@ -311,7 +314,8 @@ public class ClusterRerouteIT extends ESIntegTestCase {
state = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean()).add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
+ .add(new AllocateEmptyPrimaryAllocationCommand("test", 0, node_1, true))
)
).getState();
assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1));
@@ -359,7 +363,10 @@ public class ClusterRerouteIT extends ESIntegTestCase {
logger.info("--> try to move the shard from node1 to node2");
MoveAllocationCommand cmd = new MoveAllocationCommand("test", 0, node_1, node_2);
ClusterRerouteResponse resp = safeGet(
- client().execute(TransportClusterRerouteAction.TYPE, new ClusterRerouteRequest().add(cmd).explain(true))
+ client().execute(
+ TransportClusterRerouteAction.TYPE,
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).add(cmd).explain(true)
+ )
);
RoutingExplanations e = resp.getExplanations();
assertThat(e.explanations().size(), equalTo(1));
@@ -411,7 +418,9 @@ public class ClusterRerouteIT extends ESIntegTestCase {
ClusterRerouteResponse dryRunResponse = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(randomBoolean()).dryRun(true).add(dryRunAllocation)
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).explain(randomBoolean())
+ .dryRun(true)
+ .add(dryRunAllocation)
)
);
@@ -445,7 +454,9 @@ public class ClusterRerouteIT extends ESIntegTestCase {
ClusterRerouteResponse response = safeGet(
client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().explain(true) // so we get a NO decision back rather than an exception
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
+ // set explain(true) so we get a NO decision back rather than an exception
+ .explain(true)
.add(yesDecisionAllocation)
.add(noDecisionAllocation)
)
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
index db84f2a0a34d..d97063454920 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java
@@ -260,7 +260,7 @@ public class PrimaryAllocationIT extends ESIntegTestCase {
TransportIndicesShardStoresAction.TYPE,
new IndicesShardStoresRequest(idxName)
).get().getStoreStatuses().get(idxName);
- final var rerouteRequest = new ClusterRerouteRequest();
+ final var rerouteRequest = new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT);
for (Map.Entry<Integer, List<IndicesShardStoresResponse.StoreStatus>> shardStoreStatuses : storeStatuses.entrySet()) {
int shardId = shardStoreStatuses.getKey();
IndicesShardStoresResponse.StoreStatus storeStatus = randomFrom(shardStoreStatuses.getValue());
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
index f0026dfa0a34..17daf403e056 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java
@@ -611,7 +611,7 @@ public class RelocationIT extends ESIntegTestCase {
logger.info("--> relocate the shard from node1 to node2");
ActionFuture relocationListener = client().execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().add(new MoveAllocationCommand("test", 0, node1, node2))
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).add(new MoveAllocationCommand("test", 0, node1, node2))
);
logger.info("--> index 100 docs while relocating");
for (int i = 20; i < 120; i++) {
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
index 402f3666c2e8..b245a752524e 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequest.java
@@ -13,6 +13,7 @@ import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommands;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.core.TimeValue;
import java.io.IOException;
import java.util.Objects;
@@ -34,8 +35,8 @@ public class ClusterRerouteRequest extends AcknowledgedRequest {
public ClusterRerouteRequestBuilder(ElasticsearchClient client) {
- super(client, TransportClusterRerouteAction.TYPE, new ClusterRerouteRequest());
+ super(
+ client,
+ TransportClusterRerouteAction.TYPE,
+ new ClusterRerouteRequest(
+ MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT,
+ AcknowledgedRequest.DEFAULT_ACK_TIMEOUT
+ )
+ );
}
/**
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java
index 47d6198114fc..efd6f3e7b62c 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestClusterRerouteAction.java
@@ -100,12 +100,10 @@ public class RestClusterRerouteAction extends BaseRestHandler {
}
public static ClusterRerouteRequest createRequest(RestRequest request) throws IOException {
- ClusterRerouteRequest clusterRerouteRequest = new ClusterRerouteRequest();
+ final var clusterRerouteRequest = new ClusterRerouteRequest(getMasterNodeTimeout(request), getAckTimeout(request));
clusterRerouteRequest.dryRun(request.paramAsBoolean("dry_run", clusterRerouteRequest.dryRun()));
clusterRerouteRequest.explain(request.paramAsBoolean("explain", clusterRerouteRequest.explain()));
- clusterRerouteRequest.ackTimeout(getAckTimeout(request));
clusterRerouteRequest.setRetryFailed(request.paramAsBoolean("retry_failed", clusterRerouteRequest.isRetryFailed()));
- clusterRerouteRequest.masterNodeTimeout(getMasterNodeTimeout(request));
request.applyContentParser(parser -> PARSER.parse(parser, clusterRerouteRequest, null));
return clusterRerouteRequest;
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
index 7ccdb5da6d73..5c39275abbcf 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java
@@ -21,8 +21,8 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.network.NetworkModule;
-import org.elasticsearch.core.TimeValue;
import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestUtils;
import org.elasticsearch.rest.action.admin.cluster.RestClusterRerouteAction;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
@@ -38,6 +38,7 @@ import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
+import static org.elasticsearch.action.support.master.AcknowledgedRequest.DEFAULT_ACK_TIMEOUT;
import static org.elasticsearch.core.TimeValue.timeValueMillis;
import static org.elasticsearch.rest.RestUtils.REST_MASTER_TIMEOUT_PARAM;
@@ -80,7 +81,7 @@ public class ClusterRerouteRequestTests extends ESTestCase {
}
private ClusterRerouteRequest randomRequest() {
- ClusterRerouteRequest request = new ClusterRerouteRequest();
+ ClusterRerouteRequest request = new ClusterRerouteRequest(randomTimeValue(), randomTimeValue());
int commands = between(0, 10);
for (int i = 0; i < commands; i++) {
request.add(randomFrom(RANDOM_COMMAND_GENERATORS).get());
@@ -97,7 +98,7 @@ public class ClusterRerouteRequestTests extends ESTestCase {
assertEquals(request, request);
assertEquals(request.hashCode(), request.hashCode());
- ClusterRerouteRequest copy = new ClusterRerouteRequest().add(
+ ClusterRerouteRequest copy = new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).add(
request.getCommands().commands().toArray(new AllocationCommand[0])
);
AcknowledgedRequest clusterRerouteRequestAcknowledgedRequest = copy.dryRun(request.dryRun())
@@ -196,14 +197,14 @@ public class ClusterRerouteRequestTests extends ESTestCase {
builder.field("dry_run", original.dryRun());
}
params.put("explain", Boolean.toString(original.explain()));
- if (false == original.ackTimeout().equals(AcknowledgedRequest.DEFAULT_ACK_TIMEOUT) || randomBoolean()) {
- params.put("timeout", original.ackTimeout().toString());
+ if (false == original.ackTimeout().equals(DEFAULT_ACK_TIMEOUT) || randomBoolean()) {
+ params.put("timeout", original.ackTimeout().getStringRep());
}
if (original.isRetryFailed() || randomBoolean()) {
params.put("retry_failed", Boolean.toString(original.isRetryFailed()));
}
- if (false == original.masterNodeTimeout().equals(TimeValue.THIRTY_SECONDS) || randomBoolean()) {
- params.put(REST_MASTER_TIMEOUT_PARAM, original.masterNodeTimeout().toString());
+ if (false == original.masterNodeTimeout().equals(RestUtils.REST_MASTER_TIMEOUT_DEFAULT) || randomBoolean()) {
+ params.put(REST_MASTER_TIMEOUT_PARAM, original.masterNodeTimeout().getStringRep());
}
if (original.getCommands() != null) {
hasBody = true;
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
index 19c268100d4a..d2eb8d958bf8 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java
@@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.not;
public class ClusterRerouteTests extends ESAllocationTestCase {
public void testSerializeRequest() throws IOException {
- ClusterRerouteRequest req = new ClusterRerouteRequest();
+ ClusterRerouteRequest req = new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT);
req.setRetryFailed(randomBoolean());
req.dryRun(randomBoolean());
req.explain(randomBoolean());
@@ -86,7 +86,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase {
var responseRef = new AtomicReference();
var responseActionListener = ActionTestUtils.assertNoFailureListener(responseRef::set);
- var request = new ClusterRerouteRequest().dryRun(true);
+ var request = new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).dryRun(true);
var task = new TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask(
logger,
allocationService,
@@ -112,7 +112,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase {
);
ClusterState clusterState = createInitialClusterState(allocationService);
- var req = new ClusterRerouteRequest().dryRun(false);
+ var req = new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).dryRun(false);
var task = new TransportClusterRerouteAction.ClusterRerouteResponseAckedClusterStateUpdateTask(
logger,
allocationService,
diff --git a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
index 2ed69810c890..652e7f014b8e 100644
--- a/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
+++ b/server/src/test/java/org/elasticsearch/client/internal/AbstractClientHeadersTestCase.java
@@ -121,7 +121,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
.execute(new AssertingActionListener<>(TransportCreateSnapshotAction.TYPE.name(), client.threadPool()));
client.execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest(),
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT),
new AssertingActionListener<>(TransportClusterRerouteAction.TYPE.name(), client.threadPool())
);
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
index 1ca7333c90a2..8a13d0cdc14f 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java
@@ -142,7 +142,7 @@ public class AutoExpandReplicasTests extends ESTestCase {
state,
state.routingTable().index("index").shard(0).shardsWithState(ShardRoutingState.INITIALIZING)
);
- state = cluster.reroute(state, new ClusterRerouteRequest());
+ state = cluster.reroute(state, new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT));
}
IndexShardRoutingTable preTable = state.routingTable().index("index").shard(0);
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
index d76e9912cef0..608c81417531 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java
@@ -123,7 +123,8 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase {
for (int i = 0; i < randomIntBetween(4, 8); i++) {
DiscoveryNodes newNodes = DiscoveryNodes.builder(state.nodes()).add(createNode()).build();
state = ClusterState.builder(state).nodes(newNodes).build();
- state = cluster.reroute(state, new ClusterRerouteRequest()); // always reroute after adding node
+ // always reroute after adding node
+ state = cluster.reroute(state, new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT));
}
// Log the node versions (for debugging if necessary)
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
index 1ace9a786cca..0ebcbff6bf86 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
@@ -441,7 +441,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice
// randomly reroute
if (rarely()) {
- state = cluster.reroute(state, new ClusterRerouteRequest());
+ state = cluster.reroute(state, new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT));
}
// randomly start and fail allocated shards
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
index 8a5d282fe014..cbdb30652f96 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java
@@ -1052,7 +1052,7 @@ public class SnapshotResiliencyTests extends ESTestCase {
scheduleNow(
() -> testClusterNodes.randomMasterNodeSafe().client.execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().add(
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).add(
new AllocateEmptyPrimaryAllocationCommand(
index,
shardRouting.shardId().id(),
diff --git a/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteUtils.java b/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteUtils.java
index 3da245ec430a..5bfff80e3b86 100644
--- a/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteUtils.java
+++ b/test/framework/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteUtils.java
@@ -14,6 +14,7 @@ import org.elasticsearch.action.support.SubscribableListener;
import org.elasticsearch.client.internal.ElasticsearchClient;
import org.elasticsearch.cluster.routing.allocation.command.AllocationCommand;
+import static org.elasticsearch.test.ESTestCase.TEST_REQUEST_TIMEOUT;
import static org.elasticsearch.test.ESTestCase.asInstanceOf;
import static org.elasticsearch.test.ESTestCase.safeAwait;
import static org.elasticsearch.test.ESTestCase.safeGet;
@@ -45,7 +46,8 @@ public class ClusterRerouteUtils {
safeGet(
client.execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().setRetryFailed(retryFailed).add(allocationCommands)
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).setRetryFailed(retryFailed)
+ .add(allocationCommands)
)
)
);
@@ -60,7 +62,7 @@ public class ClusterRerouteUtils {
SubscribableListener.newForked(
l -> client.execute(
TransportClusterRerouteAction.TYPE,
- new ClusterRerouteRequest().add(allocationCommands),
+ new ClusterRerouteRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT).add(allocationCommands),
ActionTestUtils.assertNoSuccessListener(l::onResponse)
)
)
From 53aefdd79d0a4c3d8d913a9c0472d316866f793f Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Tue, 11 Jun 2024 15:04:38 -0400
Subject: [PATCH 17/34] add hexstring support byte painless scorers (#109492)
Hexadecimal strings are already supported for index input and for kNN queries, so we should also support them for byte vectors in Painless.
This commit adds that support to our common scoring functions.
closes: #109412
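For context, a minimal illustrative sketch (not part of this patch) of how a hex-encoded query vector decodes into the byte[] consumed by the byte scorers. It uses java.util.HexFormat.parseHex, the same call the new code path in VectorScoreScriptUtils relies on; the value "006ff30e84" matches the query_vector used in the YAML tests below, and the class name is hypothetical.

    import java.util.Arrays;
    import java.util.HexFormat;

    // Hypothetical example class, for illustration only.
    public class HexQueryVectorSketch {
        public static void main(String[] args) {
            // Each pair of hex digits becomes one signed byte of the query vector.
            byte[] queryVector = HexFormat.of().parseHex("006ff30e84");
            System.out.println(Arrays.toString(queryVector)); // [0, 111, -13, 14, -124]
        }
    }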
---
docs/changelog/109492.yaml | 5 +
.../index.asciidoc | 8 +-
.../whitelist-json/painless-score.json | 2 +-
.../org.elasticsearch.script.score.txt | 8 +-
.../painless/145_dense_vector_byte_basic.yml | 61 ++++++++++
.../painless/155_dense_vector_byte_l1l2.yml | 61 ++++++++++
.../script/VectorScoreScriptUtils.java | 114 ++++++++++++++++--
.../script/VectorScoreScriptUtilsTests.java | 17 ++-
8 files changed, 253 insertions(+), 23 deletions(-)
create mode 100644 docs/changelog/109492.yaml
diff --git a/docs/changelog/109492.yaml b/docs/changelog/109492.yaml
new file mode 100644
index 000000000000..d4d1e83eb778
--- /dev/null
+++ b/docs/changelog/109492.yaml
@@ -0,0 +1,5 @@
+pr: 109492
+summary: Add hexstring support byte painless scorers
+area: Search
+type: bug
+issues: []
diff --git a/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc b/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc
index e91b8026dc33..775c0cc21242 100644
--- a/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc
+++ b/docs/painless/painless-api-reference/painless-api-reference-score/index.asciidoc
@@ -10,7 +10,7 @@ The following specialized API is available in the Score context.
==== Static Methods
The following methods are directly callable without a class/instance qualifier. Note parameters denoted by a (*) are treated as read-only values.
-* double cosineSimilarity(List *, String *)
+* double cosineSimilarity(Object *, String *)
* double decayDateExp(String *, String *, String *, double *, ZonedDateTime)
* double decayDateGauss(String *, String *, String *, double *, ZonedDateTime)
* double decayDateLinear(String *, String *, String *, double *, ZonedDateTime)
@@ -20,9 +20,9 @@ The following methods are directly callable without a class/instance qualifier.
* double decayNumericExp(double *, double *, double *, double *, double)
* double decayNumericGauss(double *, double *, double *, double *, double)
* double decayNumericLinear(double *, double *, double *, double *, double)
-* double dotProduct(List *, String *)
-* double l1norm(List *, String *)
-* double l2norm(List *, String *)
+* double dotProduct(Object *, String *)
+* double l1norm(Object *, String *)
+* double l2norm(Object *, String *)
* double randomScore(int *)
* double randomScore(int *, String *)
* double saturation(double, double)
diff --git a/modules/lang-painless/src/main/generated/whitelist-json/painless-score.json b/modules/lang-painless/src/main/generated/whitelist-json/painless-score.json
index 56fe66d849b8..da9f7f7b6038 100644
--- a/modules/lang-painless/src/main/generated/whitelist-json/painless-score.json
+++ b/modules/lang-painless/src/main/generated/whitelist-json/painless-score.json
@@ -1 +1 @@
-{"name":"score","classes":[{"name":"String","imported":true,"constructors":[{"declaring":"String","parameters":[]}],"static_methods":[{"declaring":"String","name":"copyValueOf","return":"String","parameters":["char[]"]},{"declaring":"String","name":"copyValueOf","return":"String","parameters":["char[]","int","int"]},{"declaring":"String","name":"format","return":"String","parameters":["String","def[]"]},{"declaring":"String","name":"format","return":"String","parameters":["Locale","String","def[]"]},{"declaring":"String","name":"join","return":"String","parameters":["CharSequence","Iterable"]},{"declaring":"String","name":"valueOf","return":"String","parameters":["def"]}],"methods":[{"declaring":"CharSequence","name":"charAt","return":"char","parameters":["int"]},{"declaring":"CharSequence","name":"chars","return":"IntStream","parameters":[]},{"declaring":"String","name":"codePointAt","return":"int","parameters":["int"]},{"declaring":"String","name":"codePointBefore","return":"int","parameters":["int"]},{"declaring":"String","name":"codePointCount","return":"int","parameters":["int","int"]},{"declaring":"CharSequence","name":"codePoints","return":"IntStream","parameters":[]},{"declaring":"String","name":"compareTo","return":"int","parameters":["String"]},{"declaring":"String","name":"compareToIgnoreCase","return":"int","parameters":["String"]},{"declaring":"String","name":"concat","return":"String","parameters":["String"]},{"declaring":"String","name":"contains","return":"boolean","parameters":["CharSequence"]},{"declaring":"String","name":"contentEquals","return":"boolean","parameters":["CharSequence"]},{"declaring":null,"name":"decodeBase64","return":"String","parameters":[]},{"declaring":null,"name":"encodeBase64","return":"String","parameters":[]},{"declaring":"String","name":"endsWith","return":"boolean","parameters":["String"]},{"declaring":"Object","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":"String","name":"equalsIgnoreCase","return":"boolean","parameters":["String"]},{"declaring":"String","name":"getChars","return":"void","parameters":["int","int","char[]","int"]},{"declaring":"Object","name":"hashCode","return":"int","parameters":[]},{"declaring":"String","name":"indexOf","return":"int","parameters":["String"]},{"declaring":"String","name":"indexOf","return":"int","parameters":["String","int"]},{"declaring":"String","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"String","name":"lastIndexOf","return":"int","parameters":["String"]},{"declaring":"String","name":"lastIndexOf","return":"int","parameters":["String","int"]},{"declaring":"CharSequence","name":"length","return":"int","parameters":[]},{"declaring":"String","name":"offsetByCodePoints","return":"int","parameters":["int","int"]},{"declaring":"String","name":"regionMatches","return":"boolean","parameters":["int","String","int","int"]},{"declaring":"String","name":"regionMatches","return":"boolean","parameters":["boolean","int","String","int","int"]},{"declaring":"String","name":"replace","return":"String","parameters":["CharSequence","CharSequence"]},{"declaring":null,"name":"replaceAll","return":"String","parameters":["Pattern","Function"]},{"declaring":null,"name":"replaceFirst","return":"String","parameters":["Pattern","Function"]},{"declaring":null,"name":"splitOnToken","return":"String[]","parameters":["String"]},{"declaring":null,"name":"splitOnToken","return":"String[]","parameters":["String","int"]},{"declaring":"String","name":"startsWith","return":"boolean","parameters
":["String"]},{"declaring":"String","name":"startsWith","return":"boolean","parameters":["String","int"]},{"declaring":"CharSequence","name":"subSequence","return":"CharSequence","parameters":["int","int"]},{"declaring":"String","name":"substring","return":"String","parameters":["int"]},{"declaring":"String","name":"substring","return":"String","parameters":["int","int"]},{"declaring":"String","name":"toCharArray","return":"char[]","parameters":[]},{"declaring":"String","name":"toLowerCase","return":"String","parameters":[]},{"declaring":"String","name":"toLowerCase","return":"String","parameters":["Locale"]},{"declaring":"CharSequence","name":"toString","return":"String","parameters":[]},{"declaring":"String","name":"toUpperCase","return":"String","parameters":[]},{"declaring":"String","name":"toUpperCase","return":"String","parameters":["Locale"]},{"declaring":"String","name":"trim","return":"String","parameters":[]}],"static_fields":[],"fields":[]},{"name":"DenseVectorScriptDocValues","imported":true,"constructors":[],"static_methods":[],"methods":[{"declaring":"Collection","name":"add","return":"boolean","parameters":["def"]},{"declaring":"List","name":"add","return":"void","parameters":["int","def"]},{"declaring":"Collection","name":"addAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"addAll","return":"boolean","parameters":["int","Collection"]},{"declaring":null,"name":"any","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"asCollection","return":"Collection","parameters":[]},{"declaring":null,"name":"asList","return":"List","parameters":[]},{"declaring":"Collection","name":"clear","return":"void","parameters":[]},{"declaring":null,"name":"collect","return":"List","parameters":["Function"]},{"declaring":null,"name":"collect","return":"def","parameters":["Collection","Function"]},{"declaring":"Collection","name":"contains","return":"boolean","parameters":["def"]},{"declaring":"Collection","name":"containsAll","return":"boolean","parameters":["Collection"]},{"declaring":null,"name":"each","return":"def","parameters":["Consumer"]},{"declaring":null,"name":"eachWithIndex","return":"def","parameters":["ObjIntConsumer"]},{"declaring":"List","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":null,"name":"every","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"find","return":"def","parameters":["Predicate"]},{"declaring":null,"name":"findAll","return":"List","parameters":["Predicate"]},{"declaring":null,"name":"findResult","return":"def","parameters":["Function"]},{"declaring":null,"name":"findResult","return":"def","parameters":["def","Function"]},{"declaring":null,"name":"findResults","return":"List","parameters":["Function"]},{"declaring":"Iterable","name":"forEach","return":"void","parameters":["Consumer"]},{"declaring":"List","name":"get","return":"def","parameters":["int"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String","Object"]},{"declaring":null,"name":"getLength","return":"int","parameters":[]},{"declaring":null,"name":"groupBy","return":"Map","parameters":["Function"]},{"declaring":"List","name":"hashCode","return":"int","parameters":[]},{"declaring":"List","name":"indexOf","return":"int","parameters":["def"]},{"declaring":"Collection","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"Iterable","name":"iterator","return":"Iterator","parameters":[]},{"declar
ing":null,"name":"join","return":"String","parameters":["String"]},{"declaring":"List","name":"lastIndexOf","return":"int","parameters":["def"]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":[]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":["int"]},{"declaring":"List","name":"remove","return":"def","parameters":["int"]},{"declaring":"Collection","name":"removeAll","return":"boolean","parameters":["Collection"]},{"declaring":"Collection","name":"removeIf","return":"boolean","parameters":["Predicate"]},{"declaring":"List","name":"replaceAll","return":"void","parameters":["UnaryOperator"]},{"declaring":"Collection","name":"retainAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"set","return":"def","parameters":["int","def"]},{"declaring":"Collection","name":"size","return":"int","parameters":[]},{"declaring":"List","name":"sort","return":"void","parameters":["Comparator"]},{"declaring":null,"name":"split","return":"List","parameters":["Predicate"]},{"declaring":"Collection","name":"spliterator","return":"Spliterator","parameters":[]},{"declaring":"Collection","name":"stream","return":"Stream","parameters":[]},{"declaring":"List","name":"subList","return":"List","parameters":["int","int"]},{"declaring":null,"name":"sum","return":"double","parameters":[]},{"declaring":null,"name":"sum","return":"double","parameters":["ToDoubleFunction"]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":[]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":["def[]"]},{"declaring":"Object","name":"toString","return":"String","parameters":[]}],"static_fields":[],"fields":[]},{"name":"VersionScriptDocValues","imported":true,"constructors":[],"static_methods":[],"methods":[{"declaring":"Collection","name":"add","return":"boolean","parameters":["def"]},{"declaring":"List","name":"add","return":"void","parameters":["int","def"]},{"declaring":"Collection","name":"addAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"addAll","return":"boolean","parameters":["int","Collection"]},{"declaring":null,"name":"any","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"asCollection","return":"Collection","parameters":[]},{"declaring":null,"name":"asList","return":"List","parameters":[]},{"declaring":"Collection","name":"clear","return":"void","parameters":[]},{"declaring":null,"name":"collect","return":"List","parameters":["Function"]},{"declaring":null,"name":"collect","return":"def","parameters":["Collection","Function"]},{"declaring":"Collection","name":"contains","return":"boolean","parameters":["def"]},{"declaring":"Collection","name":"containsAll","return":"boolean","parameters":["Collection"]},{"declaring":null,"name":"each","return":"def","parameters":["Consumer"]},{"declaring":null,"name":"eachWithIndex","return":"def","parameters":["ObjIntConsumer"]},{"declaring":"List","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":null,"name":"every","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"find","return":"def","parameters":["Predicate"]},{"declaring":null,"name":"findAll","return":"List","parameters":["Predicate"]},{"declaring":null,"name":"findResult","return":"def","parameters":["Function"]},{"declaring":null,"name":"findResult","return":"def","parameters":["def","Function"]},{"declaring":null,"name":"findResults","return":"List","parameters":["Function"]},{"declaring":"Iterable","na
me":"forEach","return":"void","parameters":["Consumer"]},{"declaring":"VersionScriptDocValues","name":"get","return":"String","parameters":["int"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String","Object"]},{"declaring":null,"name":"getLength","return":"int","parameters":[]},{"declaring":"VersionScriptDocValues","name":"getValue","return":"String","parameters":[]},{"declaring":null,"name":"groupBy","return":"Map","parameters":["Function"]},{"declaring":"List","name":"hashCode","return":"int","parameters":[]},{"declaring":"List","name":"indexOf","return":"int","parameters":["def"]},{"declaring":"Collection","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"Iterable","name":"iterator","return":"Iterator","parameters":[]},{"declaring":null,"name":"join","return":"String","parameters":["String"]},{"declaring":"List","name":"lastIndexOf","return":"int","parameters":["def"]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":[]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":["int"]},{"declaring":"List","name":"remove","return":"def","parameters":["int"]},{"declaring":"Collection","name":"removeAll","return":"boolean","parameters":["Collection"]},{"declaring":"Collection","name":"removeIf","return":"boolean","parameters":["Predicate"]},{"declaring":"List","name":"replaceAll","return":"void","parameters":["UnaryOperator"]},{"declaring":"Collection","name":"retainAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"set","return":"def","parameters":["int","def"]},{"declaring":"Collection","name":"size","return":"int","parameters":[]},{"declaring":"List","name":"sort","return":"void","parameters":["Comparator"]},{"declaring":null,"name":"split","return":"List","parameters":["Predicate"]},{"declaring":"Collection","name":"spliterator","return":"Spliterator","parameters":[]},{"declaring":"Collection","name":"stream","return":"Stream","parameters":[]},{"declaring":"List","name":"subList","return":"List","parameters":["int","int"]},{"declaring":null,"name":"sum","return":"double","parameters":[]},{"declaring":null,"name":"sum","return":"double","parameters":["ToDoubleFunction"]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":[]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":["def[]"]},{"declaring":"Object","name":"toString","return":"String","parameters":[]}],"static_fields":[],"fields":[]}],"imported_methods":[{"declaring":null,"name":"saturation","return":"double","parameters":["double","double"]},{"declaring":null,"name":"sigmoid","return":"double","parameters":["double","double","double"]}],"class_bindings":[{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity","name":"cosineSimilarity","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.util.List","java.lang.String"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateExp","name":"decayDateExp","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZonedDateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateGauss","name":"decayDateGauss","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZoned
DateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateLinear","name":"decayDateLinear","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZonedDateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoExp","name":"decayGeoExp","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoGauss","name":"decayGeoGauss","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoLinear","name":"decayGeoLinear","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericExp","name":"decayNumericExp","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericGauss","name":"decayNumericGauss","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericLinear","name":"decayNumericLinear","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$DotProduct","name":"dotProduct","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.util.List","java.lang.String"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$L1Norm","name":"l1norm","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.util.List","java.lang.String"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$L2Norm","name":"l2norm","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.util.List","java.lang.String"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$RandomScoreDoc","name":"randomScore","return":"double","read_only":2,"parameters":["org.elasticsearch.script.ScoreScript","int"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$RandomScoreField","name":"randomScore","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","int","java.lang.String"]}],"instance_bindings":[]}
+{"name":"score","classes":[{"name":"String","imported":true,"constructors":[{"declaring":"String","parameters":[]}],"static_methods":[{"declaring":"String","name":"copyValueOf","return":"String","parameters":["char[]"]},{"declaring":"String","name":"copyValueOf","return":"String","parameters":["char[]","int","int"]},{"declaring":"String","name":"format","return":"String","parameters":["String","def[]"]},{"declaring":"String","name":"format","return":"String","parameters":["Locale","String","def[]"]},{"declaring":"String","name":"join","return":"String","parameters":["CharSequence","Iterable"]},{"declaring":"String","name":"valueOf","return":"String","parameters":["def"]}],"methods":[{"declaring":"CharSequence","name":"charAt","return":"char","parameters":["int"]},{"declaring":"CharSequence","name":"chars","return":"IntStream","parameters":[]},{"declaring":"String","name":"codePointAt","return":"int","parameters":["int"]},{"declaring":"String","name":"codePointBefore","return":"int","parameters":["int"]},{"declaring":"String","name":"codePointCount","return":"int","parameters":["int","int"]},{"declaring":"CharSequence","name":"codePoints","return":"IntStream","parameters":[]},{"declaring":"String","name":"compareTo","return":"int","parameters":["String"]},{"declaring":"String","name":"compareToIgnoreCase","return":"int","parameters":["String"]},{"declaring":"String","name":"concat","return":"String","parameters":["String"]},{"declaring":"String","name":"contains","return":"boolean","parameters":["CharSequence"]},{"declaring":"String","name":"contentEquals","return":"boolean","parameters":["CharSequence"]},{"declaring":null,"name":"decodeBase64","return":"String","parameters":[]},{"declaring":null,"name":"encodeBase64","return":"String","parameters":[]},{"declaring":"String","name":"endsWith","return":"boolean","parameters":["String"]},{"declaring":"Object","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":"String","name":"equalsIgnoreCase","return":"boolean","parameters":["String"]},{"declaring":"String","name":"getChars","return":"void","parameters":["int","int","char[]","int"]},{"declaring":"Object","name":"hashCode","return":"int","parameters":[]},{"declaring":"String","name":"indexOf","return":"int","parameters":["String"]},{"declaring":"String","name":"indexOf","return":"int","parameters":["String","int"]},{"declaring":"String","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"String","name":"lastIndexOf","return":"int","parameters":["String"]},{"declaring":"String","name":"lastIndexOf","return":"int","parameters":["String","int"]},{"declaring":"CharSequence","name":"length","return":"int","parameters":[]},{"declaring":"String","name":"offsetByCodePoints","return":"int","parameters":["int","int"]},{"declaring":"String","name":"regionMatches","return":"boolean","parameters":["int","String","int","int"]},{"declaring":"String","name":"regionMatches","return":"boolean","parameters":["boolean","int","String","int","int"]},{"declaring":"String","name":"replace","return":"String","parameters":["CharSequence","CharSequence"]},{"declaring":null,"name":"replaceAll","return":"String","parameters":["Pattern","Function"]},{"declaring":null,"name":"replaceFirst","return":"String","parameters":["Pattern","Function"]},{"declaring":null,"name":"splitOnToken","return":"String[]","parameters":["String"]},{"declaring":null,"name":"splitOnToken","return":"String[]","parameters":["String","int"]},{"declaring":"String","name":"startsWith","return":"boolean","parameters
":["String"]},{"declaring":"String","name":"startsWith","return":"boolean","parameters":["String","int"]},{"declaring":"CharSequence","name":"subSequence","return":"CharSequence","parameters":["int","int"]},{"declaring":"String","name":"substring","return":"String","parameters":["int"]},{"declaring":"String","name":"substring","return":"String","parameters":["int","int"]},{"declaring":"String","name":"toCharArray","return":"char[]","parameters":[]},{"declaring":"String","name":"toLowerCase","return":"String","parameters":[]},{"declaring":"String","name":"toLowerCase","return":"String","parameters":["Locale"]},{"declaring":"CharSequence","name":"toString","return":"String","parameters":[]},{"declaring":"String","name":"toUpperCase","return":"String","parameters":[]},{"declaring":"String","name":"toUpperCase","return":"String","parameters":["Locale"]},{"declaring":"String","name":"trim","return":"String","parameters":[]}],"static_fields":[],"fields":[]},{"name":"DenseVectorScriptDocValues","imported":true,"constructors":[],"static_methods":[],"methods":[{"declaring":"Collection","name":"add","return":"boolean","parameters":["def"]},{"declaring":"List","name":"add","return":"void","parameters":["int","def"]},{"declaring":"Collection","name":"addAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"addAll","return":"boolean","parameters":["int","Collection"]},{"declaring":null,"name":"any","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"asCollection","return":"Collection","parameters":[]},{"declaring":null,"name":"asList","return":"List","parameters":[]},{"declaring":"Collection","name":"clear","return":"void","parameters":[]},{"declaring":null,"name":"collect","return":"List","parameters":["Function"]},{"declaring":null,"name":"collect","return":"def","parameters":["Collection","Function"]},{"declaring":"Collection","name":"contains","return":"boolean","parameters":["def"]},{"declaring":"Collection","name":"containsAll","return":"boolean","parameters":["Collection"]},{"declaring":null,"name":"each","return":"def","parameters":["Consumer"]},{"declaring":null,"name":"eachWithIndex","return":"def","parameters":["ObjIntConsumer"]},{"declaring":"List","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":null,"name":"every","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"find","return":"def","parameters":["Predicate"]},{"declaring":null,"name":"findAll","return":"List","parameters":["Predicate"]},{"declaring":null,"name":"findResult","return":"def","parameters":["Function"]},{"declaring":null,"name":"findResult","return":"def","parameters":["def","Function"]},{"declaring":null,"name":"findResults","return":"List","parameters":["Function"]},{"declaring":"Iterable","name":"forEach","return":"void","parameters":["Consumer"]},{"declaring":"List","name":"get","return":"def","parameters":["int"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String","Object"]},{"declaring":null,"name":"getLength","return":"int","parameters":[]},{"declaring":null,"name":"groupBy","return":"Map","parameters":["Function"]},{"declaring":"List","name":"hashCode","return":"int","parameters":[]},{"declaring":"List","name":"indexOf","return":"int","parameters":["def"]},{"declaring":"Collection","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"Iterable","name":"iterator","return":"Iterator","parameters":[]},{"declar
ing":null,"name":"join","return":"String","parameters":["String"]},{"declaring":"List","name":"lastIndexOf","return":"int","parameters":["def"]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":[]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":["int"]},{"declaring":"List","name":"remove","return":"def","parameters":["int"]},{"declaring":"Collection","name":"removeAll","return":"boolean","parameters":["Collection"]},{"declaring":"Collection","name":"removeIf","return":"boolean","parameters":["Predicate"]},{"declaring":"List","name":"replaceAll","return":"void","parameters":["UnaryOperator"]},{"declaring":"Collection","name":"retainAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"set","return":"def","parameters":["int","def"]},{"declaring":"Collection","name":"size","return":"int","parameters":[]},{"declaring":"List","name":"sort","return":"void","parameters":["Comparator"]},{"declaring":null,"name":"split","return":"List","parameters":["Predicate"]},{"declaring":"Collection","name":"spliterator","return":"Spliterator","parameters":[]},{"declaring":"Collection","name":"stream","return":"Stream","parameters":[]},{"declaring":"List","name":"subList","return":"List","parameters":["int","int"]},{"declaring":null,"name":"sum","return":"double","parameters":[]},{"declaring":null,"name":"sum","return":"double","parameters":["ToDoubleFunction"]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":[]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":["def[]"]},{"declaring":"Object","name":"toString","return":"String","parameters":[]}],"static_fields":[],"fields":[]},{"name":"VersionScriptDocValues","imported":true,"constructors":[],"static_methods":[],"methods":[{"declaring":"Collection","name":"add","return":"boolean","parameters":["def"]},{"declaring":"List","name":"add","return":"void","parameters":["int","def"]},{"declaring":"Collection","name":"addAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"addAll","return":"boolean","parameters":["int","Collection"]},{"declaring":null,"name":"any","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"asCollection","return":"Collection","parameters":[]},{"declaring":null,"name":"asList","return":"List","parameters":[]},{"declaring":"Collection","name":"clear","return":"void","parameters":[]},{"declaring":null,"name":"collect","return":"List","parameters":["Function"]},{"declaring":null,"name":"collect","return":"def","parameters":["Collection","Function"]},{"declaring":"Collection","name":"contains","return":"boolean","parameters":["def"]},{"declaring":"Collection","name":"containsAll","return":"boolean","parameters":["Collection"]},{"declaring":null,"name":"each","return":"def","parameters":["Consumer"]},{"declaring":null,"name":"eachWithIndex","return":"def","parameters":["ObjIntConsumer"]},{"declaring":"List","name":"equals","return":"boolean","parameters":["Object"]},{"declaring":null,"name":"every","return":"boolean","parameters":["Predicate"]},{"declaring":null,"name":"find","return":"def","parameters":["Predicate"]},{"declaring":null,"name":"findAll","return":"List","parameters":["Predicate"]},{"declaring":null,"name":"findResult","return":"def","parameters":["Function"]},{"declaring":null,"name":"findResult","return":"def","parameters":["def","Function"]},{"declaring":null,"name":"findResults","return":"List","parameters":["Function"]},{"declaring":"Iterable","na
me":"forEach","return":"void","parameters":["Consumer"]},{"declaring":"VersionScriptDocValues","name":"get","return":"String","parameters":["int"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String"]},{"declaring":null,"name":"getByPath","return":"Object","parameters":["String","Object"]},{"declaring":null,"name":"getLength","return":"int","parameters":[]},{"declaring":"VersionScriptDocValues","name":"getValue","return":"String","parameters":[]},{"declaring":null,"name":"groupBy","return":"Map","parameters":["Function"]},{"declaring":"List","name":"hashCode","return":"int","parameters":[]},{"declaring":"List","name":"indexOf","return":"int","parameters":["def"]},{"declaring":"Collection","name":"isEmpty","return":"boolean","parameters":[]},{"declaring":"Iterable","name":"iterator","return":"Iterator","parameters":[]},{"declaring":null,"name":"join","return":"String","parameters":["String"]},{"declaring":"List","name":"lastIndexOf","return":"int","parameters":["def"]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":[]},{"declaring":"List","name":"listIterator","return":"ListIterator","parameters":["int"]},{"declaring":"List","name":"remove","return":"def","parameters":["int"]},{"declaring":"Collection","name":"removeAll","return":"boolean","parameters":["Collection"]},{"declaring":"Collection","name":"removeIf","return":"boolean","parameters":["Predicate"]},{"declaring":"List","name":"replaceAll","return":"void","parameters":["UnaryOperator"]},{"declaring":"Collection","name":"retainAll","return":"boolean","parameters":["Collection"]},{"declaring":"List","name":"set","return":"def","parameters":["int","def"]},{"declaring":"Collection","name":"size","return":"int","parameters":[]},{"declaring":"List","name":"sort","return":"void","parameters":["Comparator"]},{"declaring":null,"name":"split","return":"List","parameters":["Predicate"]},{"declaring":"Collection","name":"spliterator","return":"Spliterator","parameters":[]},{"declaring":"Collection","name":"stream","return":"Stream","parameters":[]},{"declaring":"List","name":"subList","return":"List","parameters":["int","int"]},{"declaring":null,"name":"sum","return":"double","parameters":[]},{"declaring":null,"name":"sum","return":"double","parameters":["ToDoubleFunction"]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":[]},{"declaring":"Collection","name":"toArray","return":"def[]","parameters":["def[]"]},{"declaring":"Object","name":"toString","return":"String","parameters":[]}],"static_fields":[],"fields":[]}],"imported_methods":[{"declaring":null,"name":"saturation","return":"double","parameters":["double","double"]},{"declaring":null,"name":"sigmoid","return":"double","parameters":["double","double","double"]}],"class_bindings":[{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity","name":"cosineSimilarity","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.lang.Object","java.lang.String"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateExp","name":"decayDateExp","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZonedDateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateGauss","name":"decayDateGauss","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZon
edDateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayDateLinear","name":"decayDateLinear","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.script.JodaCompatibleZonedDateTime"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoExp","name":"decayGeoExp","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoGauss","name":"decayGeoGauss","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayGeoLinear","name":"decayGeoLinear","return":"double","read_only":4,"parameters":["java.lang.String","java.lang.String","java.lang.String","double","org.elasticsearch.common.geo.GeoPoint"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericExp","name":"decayNumericExp","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericGauss","name":"decayNumericGauss","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$DecayNumericLinear","name":"decayNumericLinear","return":"double","read_only":4,"parameters":["double","double","double","double","double"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$DotProduct","name":"dotProduct","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.lang.Object","java.lang.String"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$L1Norm","name":"l1norm","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.lang.Object","java.lang.String"]},{"declaring":"org.elasticsearch.script.VectorScoreScriptUtils$L2Norm","name":"l2norm","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","java.lang.Object","java.lang.String"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$RandomScoreDoc","name":"randomScore","return":"double","read_only":2,"parameters":["org.elasticsearch.script.ScoreScript","int"]},{"declaring":"org.elasticsearch.script.ScoreScriptUtils$RandomScoreField","name":"randomScore","return":"double","read_only":3,"parameters":["org.elasticsearch.script.ScoreScript","int","java.lang.String"]}],"instance_bindings":[]}
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
index e1769d28e226..b0506e7aa677 100644
--- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt
@@ -27,9 +27,9 @@ static_import {
double decayDateLinear(String, String, String, double, ZonedDateTime) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayDateLinear
double decayDateExp(String, String, String, double, ZonedDateTime) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayDateExp
double decayDateGauss(String, String, String, double, ZonedDateTime) bound_to org.elasticsearch.script.ScoreScriptUtils$DecayDateGauss
- double l1norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$L1Norm
- double l2norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$L2Norm
- double cosineSimilarity(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity
- double dotProduct(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct
+ double l1norm(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$L1Norm
+ double l2norm(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$L2Norm
+ double cosineSimilarity(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity
+ double dotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct
}
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
index 6ac4ba01c34e..7d7f7fb01a7b 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
@@ -77,7 +77,35 @@ setup:
- match: {hits.hits.2._id: "1"}
- match: {hits.hits.2._score: 1632.0}
+---
+"Dot Product hexidecimal":
+ - requires:
+ cluster_features: "gte_v8.15.0"
+ reason: "support for hexidecimal byte vectors added in 8.15"
+ - do:
+ headers:
+ Content-Type: application/json
+ search:
+ rest_total_hits_as_int: true
+ body:
+ query:
+ script_score:
+ query: {match_all: {} }
+ script:
+ source: "dotProduct(params.query_vector, 'vector')"
+ params:
+ query_vector: "006ff30e84"
+ - match: {hits.total: 3}
+
+ - match: {hits.hits.0._id: "2"}
+ - match: {hits.hits.0._score: 28732.0}
+
+ - match: {hits.hits.1._id: "3"}
+ - match: {hits.hits.1._score: 17439.0}
+
+ - match: {hits.hits.2._id: "1"}
+ - match: {hits.hits.2._score: 1632.0}
---
"Cosine Similarity":
- do:
@@ -108,6 +136,39 @@ setup:
- gte: {hits.hits.2._score: 0.509}
- lte: {hits.hits.2._score: 0.512}
+---
+"Cosine Similarity hexidecimal":
+ - requires:
+ cluster_features: "gte_v8.15.0"
+ reason: "support for hexidecimal byte vectors added in 8.15"
+ - do:
+ headers:
+ Content-Type: application/json
+ search:
+ rest_total_hits_as_int: true
+ body:
+ query:
+ script_score:
+ query: {match_all: {} }
+ script:
+ source: "cosineSimilarity(params.query_vector, 'vector')"
+ params:
+ query_vector: "006ff30e84"
+
+ - match: {hits.total: 3}
+
+ - match: {hits.hits.0._id: "2"}
+ - gte: {hits.hits.0._score: 0.995}
+ - lte: {hits.hits.0._score: 0.998}
+
+ - match: {hits.hits.1._id: "3"}
+ - gte: {hits.hits.1._score: 0.829}
+ - lte: {hits.hits.1._score: 0.832}
+
+ - match: {hits.hits.2._id: "1"}
+ - gte: {hits.hits.2._score: 0.509}
+ - lte: {hits.hits.2._score: 0.512}
+
---
"Cosine similarity with indexed vector":
- do:
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
index c3d008ea69d0..add9356df703 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
@@ -70,6 +70,35 @@ setup:
- gte: {hits.hits.2._score: 29.0}
---
+"L1 norm hexidecimal":
+ - requires:
+ cluster_features: "gte_v8.15.0"
+ reason: "support for hexidecimal byte vectors added in 8.15"
+ - do:
+ headers:
+ Content-Type: application/json
+ search:
+ rest_total_hits_as_int: true
+ body:
+ query:
+ script_score:
+ query: {match_all: {} }
+ script:
+ source: "l1norm(params.query_vector, 'my_dense_vector')"
+ params:
+ query_vector: "006ff30e84"
+
+ - match: {hits.total: 3}
+
+ - match: {hits.hits.0._id: "1"}
+ - match: {hits.hits.0._score: 246.0}
+
+ - match: {hits.hits.1._id: "3"}
+ - match: {hits.hits.1._score: 117.0}
+
+ - match: {hits.hits.2._id: "2"}
+ - gte: {hits.hits.2._score: 29.0}
+---
"L2 norm":
- do:
headers:
@@ -95,6 +124,38 @@ setup:
- gte: {hits.hits.1._score: 94.407}
- lte: {hits.hits.1._score: 94.41}
+ - match: {hits.hits.2._id: "2"}
+ - gte: {hits.hits.2._score: 15.263}
+ - lte: {hits.hits.2._score: 15.266}
+---
+"L2 norm hexidecimal":
+ - requires:
+ cluster_features: "gte_v8.15.0"
+ reason: "support for hexidecimal byte vectors added in 8.15"
+ - do:
+ headers:
+ Content-Type: application/json
+ search:
+ rest_total_hits_as_int: true
+ body:
+ query:
+ script_score:
+ query: {match_all: {} }
+ script:
+ source: "l2norm(params.query_vector, 'my_dense_vector')"
+ params:
+ query_vector: "006ff30e84"
+
+ - match: {hits.total: 3}
+
+ - match: {hits.hits.0._id: "1"}
+ - gte: {hits.hits.0._score: 158.624}
+ - lte: {hits.hits.0._score: 158.627}
+
+ - match: {hits.hits.1._id: "3"}
+ - gte: {hits.hits.1._score: 94.407}
+ - lte: {hits.hits.1._score: 94.41}
+
- match: {hits.hits.2._id: "2"}
- gte: {hits.hits.2._score: 15.263}
- lte: {hits.hits.2._score: 15.266}
diff --git a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java
index f5d4a3d66be4..b071739321ea 100644
--- a/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java
+++ b/server/src/main/java/org/elasticsearch/script/VectorScoreScriptUtils.java
@@ -13,6 +13,7 @@ import org.elasticsearch.script.field.vectors.DenseVector;
import org.elasticsearch.script.field.vectors.DenseVectorDocValuesField;
import java.io.IOException;
+import java.util.HexFormat;
import java.util.List;
public class VectorScoreScriptUtils {
@@ -65,6 +66,23 @@ public class VectorScoreScriptUtils {
this.qvMagnitude = (float) Math.sqrt(queryMagnitude);
field.getElementType().checkVectorBounds(validateValues);
}
+
+ /**
+ * Constructs a dense vector function used for byte-sized vectors.
+ *
+ * @param scoreScript The script in which this function was referenced.
+ * @param field The vector field.
+ * @param queryVector The query vector.
+ */
+ public ByteDenseVectorFunction(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
+ super(scoreScript, field);
+ this.queryVector = queryVector;
+ float queryMagnitude = 0.0f;
+ for (byte value : queryVector) {
+ queryMagnitude += value * value;
+ }
+ this.qvMagnitude = (float) Math.sqrt(queryMagnitude);
+ }
}
public static class FloatDenseVectorFunction extends DenseVectorFunction {
@@ -116,6 +134,10 @@ public class VectorScoreScriptUtils {
super(scoreScript, field, queryVector);
}
+ public ByteL1Norm(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
+ super(scoreScript, field, queryVector);
+ }
+
public double l1norm() {
setNextVector();
return field.get().l1Norm(queryVector);
@@ -138,11 +160,25 @@ public class VectorScoreScriptUtils {
private final L1NormInterface function;
- public L1Norm(ScoreScript scoreScript, List<Number> queryVector, String fieldName) {
+ @SuppressWarnings("unchecked")
+ public L1Norm(ScoreScript scoreScript, Object queryVector, String fieldName) {
DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName);
function = switch (field.getElementType()) {
- case BYTE -> new ByteL1Norm(scoreScript, field, queryVector);
- case FLOAT -> new FloatL1Norm(scoreScript, field, queryVector);
+ case BYTE -> {
+ if (queryVector instanceof List) {
+ yield new ByteL1Norm(scoreScript, field, (List<Number>) queryVector);
+ } else if (queryVector instanceof String s) {
+ byte[] parsedQueryVector = HexFormat.of().parseHex(s);
+ yield new ByteL1Norm(scoreScript, field, parsedQueryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+ }
+ case FLOAT -> {
+ if (queryVector instanceof List) {
+ yield new FloatL1Norm(scoreScript, field, (List<Number>) queryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
+ }
};
}
@@ -162,6 +198,10 @@ public class VectorScoreScriptUtils {
super(scoreScript, field, queryVector);
}
+ public ByteL2Norm(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
+ super(scoreScript, field, queryVector);
+ }
+
public double l2norm() {
setNextVector();
return field.get().l2Norm(queryVector);
@@ -184,11 +224,25 @@ public class VectorScoreScriptUtils {
private final L2NormInterface function;
- public L2Norm(ScoreScript scoreScript, List<Number> queryVector, String fieldName) {
+ @SuppressWarnings("unchecked")
+ public L2Norm(ScoreScript scoreScript, Object queryVector, String fieldName) {
DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName);
function = switch (field.getElementType()) {
- case BYTE -> new ByteL2Norm(scoreScript, field, queryVector);
- case FLOAT -> new FloatL2Norm(scoreScript, field, queryVector);
+ case BYTE -> {
+ if (queryVector instanceof List) {
+ yield new ByteL2Norm(scoreScript, field, (List<Number>) queryVector);
+ } else if (queryVector instanceof String s) {
+ byte[] parsedQueryVector = HexFormat.of().parseHex(s);
+ yield new ByteL2Norm(scoreScript, field, parsedQueryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+ }
+ case FLOAT -> {
+ if (queryVector instanceof List) {
+ yield new FloatL2Norm(scoreScript, field, (List<Number>) queryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
+ }
};
}
@@ -208,6 +262,10 @@ public class VectorScoreScriptUtils {
super(scoreScript, field, queryVector);
}
+ public ByteDotProduct(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
+ super(scoreScript, field, queryVector);
+ }
+
public double dotProduct() {
setNextVector();
return field.get().dotProduct(queryVector);
@@ -230,11 +288,25 @@ public class VectorScoreScriptUtils {
private final DotProductInterface function;
- public DotProduct(ScoreScript scoreScript, List<Number> queryVector, String fieldName) {
+ @SuppressWarnings("unchecked")
+ public DotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) {
DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName);
function = switch (field.getElementType()) {
- case BYTE -> new ByteDotProduct(scoreScript, field, queryVector);
- case FLOAT -> new FloatDotProduct(scoreScript, field, queryVector);
+ case BYTE -> {
+ if (queryVector instanceof List) {
+ yield new ByteDotProduct(scoreScript, field, (List<Number>) queryVector);
+ } else if (queryVector instanceof String s) {
+ byte[] parsedQueryVector = HexFormat.of().parseHex(s);
+ yield new ByteDotProduct(scoreScript, field, parsedQueryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+ }
+ case FLOAT -> {
+ if (queryVector instanceof List) {
+ yield new FloatDotProduct(scoreScript, field, (List<Number>) queryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
+ }
};
}
@@ -254,6 +326,10 @@ public class VectorScoreScriptUtils {
super(scoreScript, field, queryVector);
}
+ public ByteCosineSimilarity(ScoreScript scoreScript, DenseVectorDocValuesField field, byte[] queryVector) {
+ super(scoreScript, field, queryVector);
+ }
+
public double cosineSimilarity() {
setNextVector();
return field.get().cosineSimilarity(queryVector, qvMagnitude);
@@ -276,11 +352,25 @@ public class VectorScoreScriptUtils {
private final CosineSimilarityInterface function;
- public CosineSimilarity(ScoreScript scoreScript, List<Number> queryVector, String fieldName) {
+ @SuppressWarnings("unchecked")
+ public CosineSimilarity(ScoreScript scoreScript, Object queryVector, String fieldName) {
DenseVectorDocValuesField field = (DenseVectorDocValuesField) scoreScript.field(fieldName);
function = switch (field.getElementType()) {
- case BYTE -> new ByteCosineSimilarity(scoreScript, field, queryVector);
- case FLOAT -> new FloatCosineSimilarity(scoreScript, field, queryVector);
+ case BYTE -> {
+ if (queryVector instanceof List) {
+ yield new ByteCosineSimilarity(scoreScript, field, (List<Number>) queryVector);
+ } else if (queryVector instanceof String s) {
+ byte[] parsedQueryVector = HexFormat.of().parseHex(s);
+ yield new ByteCosineSimilarity(scoreScript, field, parsedQueryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName());
+ }
+ case FLOAT -> {
+ if (queryVector instanceof List) {
+ yield new FloatCosineSimilarity(scoreScript, field, (List<Number>) queryVector);
+ }
+ throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName());
+ }
};
}
diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java
index df9f4384719e..a095c4e6409a 100644
--- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java
+++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java
@@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
+import java.util.HexFormat;
import java.util.List;
import static org.hamcrest.Matchers.containsString;
@@ -128,6 +129,7 @@ public class VectorScoreScriptUtilsTests extends ESTestCase {
float[] docVector = new float[] { 1, 127, -128, 5, -10 };
List<Number> queryVector = Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4);
List<Number> invalidQueryVector = Arrays.asList((byte) 1, (byte) 1);
+ String hexidecimalString = HexFormat.of().formatHex(new byte[] { 1, 125, -12, 2, 4 });
List<DenseVectorDocValuesField> fields = List.of(
new ByteBinaryDenseVectorDocValuesField(
@@ -154,6 +156,14 @@ public class VectorScoreScriptUtilsTests extends ESTestCase {
0.001
);
+ function = new CosineSimilarity(scoreScript, hexidecimalString, fieldName);
+ assertEquals(
+ "cosineSimilarity result is not equal to the expected value!",
+ cosineSimilarityExpected,
+ function.cosineSimilarity(),
+ 0.001
+ );
+
// Test normalization for cosineSimilarity
float[] queryVectorArray = new float[queryVector.size()];
for (int i = 0; i < queryVectorArray.length; i++) {
@@ -191,10 +201,13 @@ public class VectorScoreScriptUtilsTests extends ESTestCase {
);
// Check scripting infrastructure integration
- DotProduct dotProduct = new DotProduct(scoreScript, queryVector, fieldName);
- assertEquals(17382.0, dotProduct.dotProduct(), 0.001);
+ assertEquals(17382.0, new DotProduct(scoreScript, queryVector, fieldName).dotProduct(), 0.001);
+ assertEquals(17382.0, new DotProduct(scoreScript, hexidecimalString, fieldName).dotProduct(), 0.001);
assertEquals(135.0, new L1Norm(scoreScript, queryVector, fieldName).l1norm(), 0.001);
+ assertEquals(135.0, new L1Norm(scoreScript, hexidecimalString, fieldName).l1norm(), 0.001);
assertEquals(116.897, new L2Norm(scoreScript, queryVector, fieldName).l2norm(), 0.001);
+ assertEquals(116.897, new L2Norm(scoreScript, hexidecimalString, fieldName).l2norm(), 0.001);
+ DotProduct dotProduct = new DotProduct(scoreScript, queryVector, fieldName);
when(scoreScript._getDocId()).thenReturn(1);
e = expectThrows(IllegalArgumentException.class, dotProduct::dotProduct);
assertEquals("A document doesn't have a value for a vector field!", e.getMessage());
From 98b0842d02363b52a487783c6d4eb75c6d0f1dc0 Mon Sep 17 00:00:00 2001
From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com>
Date: Tue, 11 Jun 2024 16:46:35 -0400
Subject: [PATCH 18/34] [ML] Fixing inference API elasticsearch service task
type always set to rerank (#109592)
* Fixing rerank hard coded task type for eland
* Update docs/changelog/109592.yaml
* Delete docs/changelog/109592.yaml
* Randomizing task type for tests
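In short, the Eland branch of `parseRequestConfig` built the model with a hard-coded `TaskType.RERANK` no matter which task type the caller asked for. Condensed from the hunk below (not a complete method), the fix threads the requested `taskType` through both the task settings and the model:

// Before: task type forced to RERANK
var taskSettings = CustomElandModel.taskSettingsFromMap(TaskType.RERANK, taskSettingsMap);
var model = CustomElandModel.build(inferenceEntityId, TaskType.RERANK, name(), customElandInternalServiceSettings, taskSettings);

// After: the caller's task type is preserved
var taskSettings = CustomElandModel.taskSettingsFromMap(taskType, taskSettingsMap);
var model = CustomElandModel.build(inferenceEntityId, taskType, name(), customElandInternalServiceSettings, taskSettings);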
---
.../ElasticsearchInternalService.java | 10 +---
.../ElasticsearchInternalServiceTests.java | 56 +++++++++++++++++--
2 files changed, 54 insertions(+), 12 deletions(-)
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
index df546efd161e..dbc36960a823 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java
@@ -132,16 +132,10 @@ public class ElasticsearchInternalService implements InferenceService {
).build();
throwIfNotEmptyMap(serviceSettingsMap, name());
- var taskSettings = CustomElandModel.taskSettingsFromMap(TaskType.RERANK, taskSettingsMap);
+ var taskSettings = CustomElandModel.taskSettingsFromMap(taskType, taskSettingsMap);
throwIfNotEmptyMap(taskSettingsMap, name());
- var model = CustomElandModel.build(
- inferenceEntityId,
- TaskType.RERANK,
- name(),
- customElandInternalServiceSettings,
- taskSettings
- );
+ var model = CustomElandModel.build(inferenceEntityId, taskType, name(), customElandInternalServiceSettings, taskSettings);
delegate.onResponse(model);
}
});
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java
index e34ce410bbab..dfcfe466c2a3 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java
@@ -11,7 +11,9 @@ package org.elasticsearch.xpack.inference.services.elasticsearch;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.internal.Client;
+import org.elasticsearch.core.TimeValue;
import org.elasticsearch.inference.ChunkedInferenceServiceResults;
import org.elasticsearch.inference.ChunkingOptions;
import org.elasticsearch.inference.InferenceResults;
@@ -55,7 +57,6 @@ import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
-import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@@ -63,6 +64,7 @@ import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.doAnswer;
@@ -83,7 +85,7 @@ public class ElasticsearchInternalServiceTests extends ESTestCase {
@After
public void shutdownThreadPool() {
- TestThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
+ terminate(threadPool);
}
public void testParseRequestConfig() {
@@ -290,7 +292,7 @@ public class ElasticsearchInternalServiceTests extends ESTestCase {
assertEquals(returnDocs, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments());
}, e -> { fail("Model parsing failed " + e.getMessage()); });
- service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener);
+ service.parseRequestConfig(randomInferenceEntityId, TaskType.RERANK, settings, Set.of(), modelListener);
}
}
@@ -332,7 +334,7 @@ public class ElasticsearchInternalServiceTests extends ESTestCase {
assertEquals(Boolean.TRUE, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments());
}, e -> { fail("Model parsing failed " + e.getMessage()); });
- service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener);
+ service.parseRequestConfig(randomInferenceEntityId, TaskType.RERANK, settings, Set.of(), modelListener);
}
}
@@ -671,6 +673,52 @@ public class ElasticsearchInternalServiceTests extends ESTestCase {
}
}
+ public void testParseRequestConfigEland_PreservesTaskType() {
+ var client = mock(Client.class);
+ doAnswer(invocationOnMock -> {
+ @SuppressWarnings("unchecked")
+ ActionListener<GetTrainedModelsAction.Response> listener = (ActionListener<GetTrainedModelsAction.Response>) invocationOnMock
+ .getArguments()[2];
+ listener.onResponse(
+ new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class)))
+ );
+ return Void.TYPE;
+ }).when(client).execute(any(), any(), any());
+ when(client.threadPool()).thenReturn(threadPool);
+
+ var service = createService(client);
+ var settings = new HashMap<String, Object>();
+ settings.put(
+ ModelConfigurations.SERVICE_SETTINGS,
+ new HashMap<>(
+ Map.of(
+ ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS,
+ 1,
+ ElasticsearchInternalServiceSettings.NUM_THREADS,
+ 4,
+ InternalServiceSettings.MODEL_ID,
+ "custom-model"
+ )
+ )
+ );
+
+ var serviceSettings = new CustomElandInternalServiceSettings(1, 4, "custom-model");
+ var taskType = randomFrom(TaskType.values());
+ var taskSettings = taskType == TaskType.RERANK ? CustomElandRerankTaskSettings.DEFAULT_SETTINGS : null;
+ var expectedModel = CustomElandModel.build(
+ randomInferenceEntityId,
+ taskType,
+ ElasticsearchInternalService.NAME,
+ serviceSettings,
+ taskSettings
+ );
+
+ PlainActionFuture<Model> listener = new PlainActionFuture<>();
+ service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), listener);
+ var model = listener.actionGet(TimeValue.THIRTY_SECONDS);
+ assertThat(model, is(expectedModel));
+ }
+
public void testBuildInferenceRequest() {
var id = randomAlphaOfLength(5);
var inputs = randomList(1, 3, () -> randomAlphaOfLength(4));
From f9c4e90609e452261ca92cafd72bf7584c7f8820 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Tue, 11 Jun 2024 16:57:48 -0400
Subject: [PATCH 19/34] Adjust bwc version after #109492 backport (#109596)
blocked by: https://github.com/elastic/elasticsearch/pull/109595
related to: #109492
---
.../test/painless/145_dense_vector_byte_basic.yml | 8 ++++----
.../test/painless/155_dense_vector_byte_l1l2.yml | 8 ++++----
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
index 7d7f7fb01a7b..4eb8df25c27b 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/145_dense_vector_byte_basic.yml
@@ -80,8 +80,8 @@ setup:
---
"Dot Product hexidecimal":
- requires:
- cluster_features: "gte_v8.15.0"
- reason: "support for hexidecimal byte vectors added in 8.15"
+ cluster_features: "gte_v8.14.1"
+ reason: "support for hexidecimal byte vectors added in 8.14"
- do:
headers:
Content-Type: application/json
@@ -139,8 +139,8 @@ setup:
---
"Cosine Similarity hexidecimal":
- requires:
- cluster_features: "gte_v8.15.0"
- reason: "support for hexidecimal byte vectors added in 8.15"
+ cluster_features: "gte_v8.14.1"
+ reason: "support for hexidecimal byte vectors added in 8.14"
- do:
headers:
Content-Type: application/json
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
index add9356df703..46075c5db744 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/155_dense_vector_byte_l1l2.yml
@@ -72,8 +72,8 @@ setup:
---
"L1 norm hexidecimal":
- requires:
- cluster_features: "gte_v8.15.0"
- reason: "support for hexidecimal byte vectors added in 8.15"
+ cluster_features: "gte_v8.14.1"
+ reason: "support for hexidecimal byte vectors added in 8.14"
- do:
headers:
Content-Type: application/json
@@ -130,8 +130,8 @@ setup:
---
"L2 norm hexidecimal":
- requires:
- cluster_features: "gte_v8.15.0"
- reason: "support for hexidecimal byte vectors added in 8.15"
+ cluster_features: "gte_v8.14.1"
+ reason: "support for hexidecimal byte vectors added in 8.14"
- do:
headers:
Content-Type: application/json
From 113087ea74fbd1fb00c281fc0693c10d86fc3cd5 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 11 Jun 2024 17:06:32 -0400
Subject: [PATCH 20/34] ESQL: Move serialization for unary functions (#109436)
This moves the serialization for unary functions from `PlanNamedTypes`
to the classes themselves, which lines up better with how the rest of
Elasticsearch works. There's a little more ceremony: *every* subclass now
declares its own serialization constructor and has to declare
`getWriteableName`. Those serialization ctors are just `super(in);`, so
you can't screw them up, but they are there, and that's more than before.
Again, though, it's what folks coming from the rest of Elasticsearch will
expect.
There is some silver lining here: it's much easier to make wire changes
with these serialization ctors. And, maybe better, it's very easy to
*know* that a class goes over the wire - it has a serialization ctor,
right there, staring at you.
There are other ways to do this, but I chose to put the serialization
ctors right in the class for the three reasons above.
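Concretely, each migrated class now carries three small pieces: a registry `ENTRY`, a serialization constructor, and `getWriteableName`. Abridged from the `FromBase64` hunks further down (the function logic and annotations are omitted here):

public class FromBase64 extends UnaryScalarFunction {
    // 1. Registry entry naming the class on the wire.
    public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
        Expression.class,
        "FromBase64",
        FromBase64::new
    );

    // 2. Serialization ctor, which just delegates to UnaryScalarFunction(StreamInput).
    private FromBase64(StreamInput in) throws IOException {
        super(in);
    }

    // 3. The wire name comes straight from the entry.
    @Override
    public String getWriteableName() {
        return ENTRY.name;
    }
}

The shared `writeTo` lives once in `UnaryScalarFunction`, which writes the `Source` and the child expression, so a subclass only has to supply the three pieces above.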
---
.../esql/core/expression/Expression.java | 17 +-
.../function/scalar/UnaryScalarFunction.java | 102 +++++++++++
.../convert/AbstractConvertFunction.java | 7 +
.../function/scalar/convert/FromBase64.java | 17 ++
.../function/scalar/convert/ToBase64.java | 12 ++
.../function/scalar/convert/ToBoolean.java | 17 ++
.../scalar/convert/ToCartesianPoint.java | 17 ++
.../scalar/convert/ToCartesianShape.java | 17 ++
.../function/scalar/convert/ToDatetime.java | 17 ++
.../function/scalar/convert/ToDegrees.java | 18 ++
.../function/scalar/convert/ToDouble.java | 13 ++
.../function/scalar/convert/ToGeoPoint.java | 17 ++
.../function/scalar/convert/ToGeoShape.java | 17 ++
.../function/scalar/convert/ToIP.java | 13 ++
.../function/scalar/convert/ToInteger.java | 17 ++
.../function/scalar/convert/ToLong.java | 13 ++
.../function/scalar/convert/ToRadians.java | 18 ++
.../function/scalar/convert/ToString.java | 13 ++
.../scalar/convert/ToUnsignedLong.java | 17 ++
.../function/scalar/convert/ToVersion.java | 17 ++
.../expression/function/scalar/math/Abs.java | 14 ++
.../math/AbstractTrigonometricFunction.java | 6 +
.../expression/function/scalar/math/Acos.java | 14 ++
.../expression/function/scalar/math/Asin.java | 14 ++
.../expression/function/scalar/math/Atan.java | 14 ++
.../expression/function/scalar/math/Cbrt.java | 14 ++
.../expression/function/scalar/math/Ceil.java | 14 ++
.../expression/function/scalar/math/Cos.java | 14 ++
.../expression/function/scalar/math/Cosh.java | 14 ++
.../function/scalar/math/Floor.java | 14 ++
.../function/scalar/math/Log10.java | 14 ++
.../function/scalar/math/Signum.java | 14 ++
.../expression/function/scalar/math/Sin.java | 13 ++
.../expression/function/scalar/math/Sinh.java | 14 ++
.../expression/function/scalar/math/Sqrt.java | 14 ++
.../expression/function/scalar/math/Tan.java | 14 ++
.../expression/function/scalar/math/Tanh.java | 14 ++
.../function/scalar/package-info.java | 20 ++-
.../function/scalar/spatial/StX.java | 14 ++
.../function/scalar/spatial/StY.java | 14 ++
.../function/scalar/string/LTrim.java | 14 ++
.../function/scalar/string/Length.java | 13 ++
.../function/scalar/string/RTrim.java | 14 ++
.../function/scalar/string/Trim.java | 14 ++
.../predicate/operator/arithmetic/Neg.java | 17 +-
.../esql/io/stream/PlanNameRegistry.java | 17 +-
.../xpack/esql/io/stream/PlanNamedTypes.java | 163 ++----------------
.../xpack/esql/plugin/EsqlPlugin.java | 2 +-
.../AbstractExpressionSerializationTests.java | 90 ++++++++++
...AbstractUnaryScalarSerializationTests.java | 36 ++++
.../convert/FromBase64SerializationTests.java | 19 ++
.../convert/ToBase64SerializationTests.java | 19 ++
.../convert/ToBooleanSerializationTests.java | 19 ++
.../ToCartesianPointSerializationTests.java | 19 ++
.../ToCartesianShapeSerializationTests.java | 19 ++
.../convert/ToDatetimeSerializationTests.java | 19 ++
.../convert/ToDegreesSerializationTests.java | 19 ++
.../convert/ToDoubleSerializationTests.java | 19 ++
.../convert/ToGeoPointSerializationTests.java | 19 ++
.../convert/ToGeoShapeSerializationTests.java | 19 ++
.../convert/ToIPSerializationTests.java | 19 ++
.../convert/ToIntegerSerializationTests.java | 19 ++
.../convert/ToLongSerializationTests.java | 19 ++
.../convert/ToRadiansSerializationTests.java | 19 ++
.../convert/ToStringSerializationTests.java | 19 ++
.../ToUnsignedLongSerializationTests.java | 19 ++
.../convert/ToVersionSerializationTests.java | 19 ++
.../scalar/math/AbsSerializationTests.java | 19 ++
.../scalar/math/AcosSerializationTests.java | 19 ++
.../scalar/math/AsinSerializationTests.java | 19 ++
.../scalar/math/AtanSerializationTests.java | 19 ++
.../scalar/math/CbrtSerializationTests.java | 19 ++
.../scalar/math/CeilSerializationTests.java | 19 ++
.../scalar/math/CosSerializationTests.java | 19 ++
.../scalar/math/CoshSerializationTests.java | 19 ++
.../scalar/math/FloorSerializationTests.java | 19 ++
.../scalar/math/Log10SerializationTests.java | 19 ++
.../scalar/math/SinSerializationTests.java | 19 ++
.../scalar/math/SinhSerializationTests.java | 19 ++
.../scalar/math/SqrtSerializationTests.java | 19 ++
.../scalar/math/TanSerializationTests.java | 19 ++
.../scalar/math/TanhSerializationTests.java | 19 ++
.../arithmetic/NegSerializationTests.java | 19 ++
.../esql/io/stream/PlanNamedTypesTests.java | 2 +-
84 files changed, 1519 insertions(+), 163 deletions(-)
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64SerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64SerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10SerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhSerializationTests.java
create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegSerializationTests.java
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java
index 00765a8c0528..ee7e0aa81f81 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/Expression.java
@@ -6,6 +6,8 @@
*/
package org.elasticsearch.xpack.esql.core.expression;
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException;
import org.elasticsearch.xpack.esql.core.capabilities.Resolvable;
import org.elasticsearch.xpack.esql.core.capabilities.Resolvables;
@@ -14,6 +16,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.core.util.StringUtils;
+import java.io.IOException;
import java.util.List;
import java.util.function.Supplier;
@@ -26,7 +29,7 @@ import java.util.function.Supplier;
* a, b, ABS(c), and i are all Expressions, with ABS(c) being a Function
* (which is a type of expression) with a single child, c.
*/
-public abstract class Expression extends Node<Expression> implements Resolvable {
+public abstract class Expression extends Node<Expression> implements Resolvable, NamedWriteable {
public static class TypeResolution {
private final boolean failed;
@@ -78,6 +81,18 @@ public abstract class Expression extends Node implements Resolvable
super(source, children);
}
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ // TODO remove this function entirely once all subclasses implement it
+ throw new UnsupportedOperationException("todo unsupported");
+ }
+
+ @Override
+ public String getWriteableName() {
+ // TODO remove this function entirely once all subclasses implement it
+ throw new UnsupportedOperationException("todo unsupported");
+ }
+
// whether the expression can be evaluated statically (folded) or not
public boolean foldable() {
return false;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java
index 2e29162aebb4..0866f97b6772 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/UnaryScalarFunction.java
@@ -7,16 +7,108 @@
package org.elasticsearch.xpack.esql.expression.function.scalar;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.TypeResolutions;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong;
+import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan;
+import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh;
+import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX;
+import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY;
+import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim;
+import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length;
+import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim;
+import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim;
+import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import java.io.IOException;
import java.util.Arrays;
+import java.util.List;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric;
public abstract class UnaryScalarFunction extends EsqlScalarFunction {
+ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
+ return List.of(
+ Abs.ENTRY,
+ Acos.ENTRY,
+ Asin.ENTRY,
+ Atan.ENTRY,
+ Cbrt.ENTRY,
+ Ceil.ENTRY,
+ Cos.ENTRY,
+ Cosh.ENTRY,
+ Floor.ENTRY,
+ FromBase64.ENTRY,
+ Length.ENTRY,
+ Log10.ENTRY,
+ LTrim.ENTRY,
+ Neg.ENTRY,
+ RTrim.ENTRY,
+ Signum.ENTRY,
+ Sin.ENTRY,
+ Sinh.ENTRY,
+ Sqrt.ENTRY,
+ StX.ENTRY,
+ StY.ENTRY,
+ Tan.ENTRY,
+ Tanh.ENTRY,
+ ToBase64.ENTRY,
+ ToBoolean.ENTRY,
+ ToCartesianPoint.ENTRY,
+ ToDatetime.ENTRY,
+ ToDegrees.ENTRY,
+ ToDouble.ENTRY,
+ ToGeoShape.ENTRY,
+ ToCartesianShape.ENTRY,
+ ToGeoPoint.ENTRY,
+ ToIP.ENTRY,
+ ToInteger.ENTRY,
+ ToLong.ENTRY,
+ ToRadians.ENTRY,
+ ToString.ENTRY,
+ ToUnsignedLong.ENTRY,
+ ToVersion.ENTRY,
+ Trim.ENTRY
+ );
+ }
+
protected final Expression field;
public UnaryScalarFunction(Source source, Expression field) {
@@ -24,6 +116,16 @@ public abstract class UnaryScalarFunction extends EsqlScalarFunction {
this.field = field;
}
+ protected UnaryScalarFunction(StreamInput in) throws IOException {
+ this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression());
+ }
+
+ @Override
+ public void writeTo(StreamOutput out) throws IOException {
+ source().writeTo(out);
+ ((PlanStreamOutput) out).writeExpression(field);
+ }
+
@Override
protected Expression.TypeResolution resolveType() {
if (childrenResolved() == false) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java
index f1d0256a1f1c..2496d8b82fa6 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java
@@ -11,6 +11,7 @@ import joptsimple.internal.Strings;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.data.Vector;
@@ -24,8 +25,10 @@ import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.expression.function.Warnings;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -53,6 +56,10 @@ public abstract class AbstractConvertFunction extends UnaryScalarFunction {
super(source, field);
}
+ protected AbstractConvertFunction(StreamInput in) throws IOException {
+ this(Source.readFrom((PlanStreamInput) in), ((PlanStreamInput) in).readExpression());
+ }
+
/**
* Build the evaluator given the evaluator a multivalued field.
*/
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java
index 2ebc3d824a4e..873d496bfc8f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;
import org.elasticsearch.compute.operator.EvalOperator;
@@ -24,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.esql.planner.PlannerUtils;
+import java.io.IOException;
import java.util.Base64;
import java.util.List;
import java.util.function.Function;
@@ -32,6 +35,11 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr
import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
public class FromBase64 extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "FromBase64",
+ FromBase64::new
+ );
@FunctionInfo(
returnType = "keyword",
@@ -53,6 +61,15 @@ public class FromBase64 extends UnaryScalarFunction {
return isString(field, sourceText(), TypeResolutions.ParamOrdinal.DEFAULT);
}
+ private FromBase64(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public DataType dataType() {
return KEYWORD;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java
index f24651337550..ab8287413c61 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;
import org.elasticsearch.compute.operator.EvalOperator;
@@ -24,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.esql.planner.PlannerUtils;
+import java.io.IOException;
import java.util.Base64;
import java.util.List;
import java.util.function.Function;
@@ -32,6 +35,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr
import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD;
public class ToBase64 extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToBase64", ToBase64::new);
@FunctionInfo(
returnType = "keyword",
@@ -40,7 +44,15 @@ public class ToBase64 extends UnaryScalarFunction {
)
public ToBase64(Source source, @Param(name = "string", type = { "keyword", "text" }, description = "A string.") Expression string) {
super(source, string);
+ }
+ private ToBase64(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
}
@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java
index 5de31b7f114e..06cc99345643 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -31,6 +34,11 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToBo
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToBoolean;
public class ToBoolean extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToBoolean",
+ ToBoolean::new
+ );
private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
Map.entry(BOOLEAN, (field, source) -> field),
@@ -62,6 +70,15 @@ public class ToBoolean extends AbstractConvertFunction {
super(source, field);
}
+ private ToBoolean(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map<DataType, BuildFactory> factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java
index 9a3362c52c0f..60a25fc91d50 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -26,6 +29,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial;
public class ToCartesianPoint extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToCartesianPoint",
+ ToCartesianPoint::new
+ );
private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval),
@@ -52,6 +60,15 @@ public class ToCartesianPoint extends AbstractConvertFunction {
super(source, field);
}
+ private ToCartesianPoint(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map<DataType, BuildFactory> factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java
index 03e8c565f342..03ac4bdf4824 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -27,6 +30,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial;
public class ToCartesianShape extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToCartesianShape",
+ ToCartesianShape::new
+ );
private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
Map.entry(CARTESIAN_POINT, (fieldEval, source) -> fieldEval),
@@ -54,6 +62,15 @@ public class ToCartesianShape extends AbstractConvertFunction {
super(source, field);
}
+ private ToCartesianShape(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map<DataType, BuildFactory> factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java
index 53c87427b960..917abc9d7716 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -30,6 +33,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToLong;
public class ToDatetime extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToDatetime",
+ ToDatetime::new
+ );
private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
Map.entry(DATETIME, (field, source) -> field),
@@ -80,6 +88,15 @@ public class ToDatetime extends AbstractConvertFunction {
super(source, field);
}
+ private ToDatetime(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map<DataType, BuildFactory> factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java
index af163b6ff878..4eb6662e3e97 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -31,6 +34,12 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG;
* to degrees.
*/
public class ToDegrees extends AbstractConvertFunction implements EvaluatorMapper {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToDegrees",
+ ToDegrees::new
+ );
+
private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
Map.entry(DOUBLE, ToDegreesEvaluator.Factory::new),
Map.entry(INTEGER, (field, source) -> new ToDegreesEvaluator.Factory(new ToDoubleFromIntEvaluator.Factory(field, source), source)),
@@ -57,6 +66,15 @@ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMappe
super(source, field);
}
+ private ToDegrees(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map<DataType, BuildFactory> factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java
index b30162ef2334..de88281e7dbd 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -33,6 +36,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToDo
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble;
public class ToDouble extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToDouble", ToDouble::new);
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(DOUBLE, (fieldEval, source) -> fieldEval),
@@ -86,6 +90,15 @@ public class ToDouble extends AbstractConvertFunction {
super(source, field);
}
+ private ToDouble(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java
index 4692155fc973..51cb08137a58 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -26,6 +29,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial;
public class ToGeoPoint extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToGeoPoint",
+ ToGeoPoint::new
+ );
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(GEO_POINT, (fieldEval, source) -> fieldEval),
@@ -52,6 +60,15 @@ public class ToGeoPoint extends AbstractConvertFunction {
super(source, field);
}
+ private ToGeoPoint(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java
index 93acd578f86c..00e9fb3e598f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -27,6 +30,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToSpatial;
public class ToGeoShape extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToGeoShape",
+ ToGeoShape::new
+ );
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(GEO_POINT, (fieldEval, source) -> fieldEval),
@@ -54,6 +62,15 @@ public class ToGeoShape extends AbstractConvertFunction {
super(source, field);
}
+ private ToGeoShape(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java
index 92467d98472c..6df85948d94e 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -26,6 +29,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TEXT;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIP;
public class ToIP extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToIP", ToIP::new);
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(IP, (field, source) -> field),
@@ -58,6 +62,15 @@ public class ToIP extends AbstractConvertFunction {
super(source, field);
}
+ private ToIP(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java
index aa926eee3a55..1785160594a7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -34,6 +37,11 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToIn
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToInt;
public class ToInteger extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToInteger",
+ ToInteger::new
+ );
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(INTEGER, (fieldEval, source) -> fieldEval),
@@ -76,6 +84,15 @@ public class ToInteger extends AbstractConvertFunction {
super(source, field);
}
+ private ToInteger(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
index 182c1c923a96..4811051c3f48 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -34,6 +37,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToLo
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToLong;
public class ToLong extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToLong", ToLong::new);
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(LONG, (fieldEval, source) -> fieldEval),
@@ -86,6 +90,15 @@ public class ToLong extends AbstractConvertFunction {
super(source, field);
}
+ private ToLong(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java
index 2fd8ae1eb0bd..a73c75a4e823 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -30,6 +33,12 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG;
* to radians.
*/
public class ToRadians extends AbstractConvertFunction implements EvaluatorMapper {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToRadians",
+ ToRadians::new
+ );
+
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(DOUBLE, ToRadiansEvaluator.Factory::new),
Map.entry(INTEGER, (field, source) -> new ToRadiansEvaluator.Factory(new ToDoubleFromIntEvaluator.Factory(field, source), source)),
@@ -56,6 +65,15 @@ public class ToRadians extends AbstractConvertFunction implements EvaluatorMappe
super(source, field);
}
+ private ToRadians(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java
index 141fa067e54e..cb9eae6b5f43 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -43,6 +46,7 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLo
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString;
public class ToString extends AbstractConvertFunction implements EvaluatorMapper {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToString", ToString::new);
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(KEYWORD, (fieldEval, source) -> fieldEval),
@@ -93,6 +97,15 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper
super(source, v);
}
+ private ToString(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java
index ad6e935f7d6a..bfbfcf44b394 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.InvalidArgumentException;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -18,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -36,6 +39,11 @@ import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsi
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToUnsignedLong;
public class ToUnsignedLong extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToUnsignedLong",
+ ToUnsignedLong::new
+ );
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(UNSIGNED_LONG, (fieldEval, source) -> fieldEval),
@@ -77,6 +85,15 @@ public class ToUnsignedLong extends AbstractConvertFunction {
super(source, field);
}
+ private ToUnsignedLong(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java
index 1941dd00837b..f6002c3c6bb1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
import java.util.Map;
@@ -26,6 +29,11 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.stringToVersion;
public class ToVersion extends AbstractConvertFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
+ Expression.class,
+ "ToVersion",
+ ToVersion::new
+ );
private static final Map EVALUATORS = Map.ofEntries(
Map.entry(VERSION, (fieldEval, source) -> fieldEval),
@@ -49,6 +57,15 @@ public class ToVersion extends AbstractConvertFunction {
super(source, v);
}
+ private ToVersion(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Map factories() {
return EVALUATORS;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java
index b821b8449a5a..363b70ef5ed1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -19,10 +21,13 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
public class Abs extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Abs", Abs::new);
+
@FunctionInfo(
returnType = { "double", "integer", "long", "unsigned_long" },
description = "Returns the absolute value.",
@@ -39,6 +44,15 @@ public class Abs extends UnaryScalarFunction {
super(source, n);
}
+ private Abs(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Evaluator(extraName = "Double")
static double process(double fieldVal) {
return Math.abs(fieldVal);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java
index fee52567d161..8353fe24b3dd 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java
@@ -7,6 +7,7 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -14,6 +15,7 @@ import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.function.Function;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
@@ -27,6 +29,10 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction {
super(source, field);
}
+ protected AbstractTrigonometricFunction(StreamInput in) throws IOException {
+ super(in);
+ }
+
/**
 * Build an evaluator for this function given the evaluator for its input.
*/
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java
index e3c83f2f4abc..a87fa8ad48bb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Inverse cosine trigonometric function.
*/
public class Acos extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Acos", Acos::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arccosine] of `n` as an angle, expressed in radians.",
@@ -38,6 +43,15 @@ public class Acos extends AbstractTrigonometricFunction {
super(source, n);
}
+ private Acos(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new AcosEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java
index bc2de0a5b511..5d8c71a461ca 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
 * Inverse sine trigonometric function.
*/
public class Asin extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Asin", Asin::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arcsine] of the input\n"
@@ -39,6 +44,15 @@ public class Asin extends AbstractTrigonometricFunction {
super(source, n);
}
+ private Asin(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new AsinEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java
index d840faf6d970..d90b12dfef43 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
 * Inverse tangent trigonometric function.
*/
public class Atan extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Atan", Atan::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Inverse_trigonometric_functions[arctangent] of the input\n"
@@ -39,6 +44,15 @@ public class Atan extends AbstractTrigonometricFunction {
super(source, n);
}
+ private Atan(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new AtanEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java
index 43c0353de490..364e91aad8b1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cbrt.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -19,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -27,6 +30,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNum
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble;
public class Cbrt extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Cbrt", Cbrt::new);
+
@FunctionInfo(returnType = "double", description = """
Returns the cube root of a number. The input can be any numeric value, the return value is always a double.
Cube roots of infinities are null.""", examples = @Example(file = "math", tag = "cbrt"))
@@ -41,6 +46,15 @@ public class Cbrt extends UnaryScalarFunction {
super(source, n);
}
+ private Cbrt(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) {
var field = toEvaluator.apply(field());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java
index 06f092a00e94..7d31cec0e54a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -31,6 +34,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNum
*
*/
public class Ceil extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Ceil", Ceil::new);
+
@FunctionInfo(
returnType = { "double", "integer", "long", "unsigned_long" },
description = "Round a number up to the nearest integer.",
@@ -49,6 +54,15 @@ public class Ceil extends UnaryScalarFunction {
super(source, n);
}
+ private Ceil(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) {
if (dataType().isInteger()) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java
index 29387606a497..4ae134a8d6c2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Cosine trigonometric function.
*/
public class Cos extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Cos", Cos::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Sine_and_cosine[cosine] of an angle.",
@@ -38,6 +43,15 @@ public class Cos extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Cos(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new CosEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java
index 5e0a8c9b970f..0cfbc195186f 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Cosine hyperbolic function.
*/
public class Cosh extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Cosh", Cosh::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic cosine] of an angle.",
@@ -38,6 +43,15 @@ public class Cosh extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Cosh(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new CoshEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java
index 173a8ef15230..73ff0aec2b12 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -17,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -31,6 +34,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNum
*
*/
public class Floor extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Floor", Floor::new);
+
@FunctionInfo(
returnType = { "double", "integer", "long", "unsigned_long" },
description = "Round a number down to the nearest integer.",
@@ -51,6 +56,15 @@ public class Floor extends UnaryScalarFunction {
super(source, n);
}
+ private Floor(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) {
if (dataType().isInteger()) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java
index 69b7efac9b7e..ae725f6ed649 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -28,6 +31,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNum
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble;
public class Log10 extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Log10", Log10::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the logarithm of a value to base 10. The input can "
@@ -47,6 +52,15 @@ public class Log10 extends UnaryScalarFunction {
super(source, n);
}
+ private Log10(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) {
var field = toEvaluator.apply(field());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java
index 7769e8c6c4a2..e78c2ce90e6c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Signum.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -20,10 +22,13 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
public class Signum extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Signum", Signum::new);
+
@FunctionInfo(
returnType = { "double" },
description = "Returns the sign of the given number.\n"
@@ -41,6 +46,15 @@ public class Signum extends UnaryScalarFunction {
super(source, n);
}
+ private Signum(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(
Function toEvaluator
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
index 2dd9520ab066..526b17fb3eb2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,14 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Sine trigonometric function.
*/
public class Sin extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Sin", Sin::new);
@FunctionInfo(
returnType = "double",
@@ -39,6 +43,15 @@ public class Sin extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Sin(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new SinEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
index 274fb938f68d..f89e626955d7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Sine hyperbolic function.
*/
public class Sinh extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Sinh", Sinh::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of an angle.",
@@ -38,6 +43,15 @@ public class Sinh extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Sinh(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new SinhEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java
index a27929b0b5d0..d1af693d8aa7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -19,6 +21,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -27,6 +30,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNum
import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.unsignedLongToDouble;
public class Sqrt extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Sqrt", Sqrt::new);
+
@FunctionInfo(returnType = "double", description = """
Returns the square root of a number. The input can be any numeric value, the return value is always a double.
Square roots of negative numbers and infinities are null.""", examples = @Example(file = "math", tag = "sqrt"))
@@ -41,6 +46,15 @@ public class Sqrt extends UnaryScalarFunction {
super(source, n);
}
+ private Sqrt(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) {
var field = toEvaluator.apply(field());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java
index e0ae6ff5234e..85cdba0db468 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Tangent trigonometric function.
*/
public class Tan extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Tan", Tan::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Sine_and_cosine[Tangent] trigonometric function of an angle.",
@@ -38,6 +43,15 @@ public class Tan extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Tan(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new TanEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java
index 5d423eaf2dd3..0cd4051968c7 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -16,12 +18,15 @@ import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
+import java.io.IOException;
import java.util.List;
/**
* Tangent hyperbolic function.
*/
public class Tanh extends AbstractTrigonometricFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Tanh", Tanh::new);
+
@FunctionInfo(
returnType = "double",
description = "Returns the {wikipedia}/Hyperbolic_functions[Tangent] hyperbolic function of an angle.",
@@ -38,6 +43,15 @@ public class Tanh extends AbstractTrigonometricFunction {
super(source, angle);
}
+ private Tanh(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) {
return new TanhEvaluator.Factory(source(), field);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java
index bf0e10f817e3..7e7a024ba2c4 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/package-info.java
@@ -54,7 +54,7 @@
*
*
* Find a function in this package similar to the one you are working on and copy it to build
- * yours. There's some ceremony required in each function class to make it constant foldable
+ * yours. There's some ceremony required in each function class to make it constant foldable,
* and return the right types. Take a stab at these, but don't worry too much about getting
* it right. Your function might extend from one of several abstract base classes, all of
* those are fine for this guide, but might have special instructions called out later.
@@ -104,9 +104,21 @@
*
*
* Add your function to {@link org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry}.
- * This links it into the language and {@code META FUNCTIONS}. Also add your function to
- * {@link org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes}. This makes your function
- * serializable over the wire. Mostly you can copy existing implementations for both.
+ * This links it into the language and {@code META FUNCTIONS}.
+ *
+ *
+ * Register your function for serialization. We're in the process of migrating this serialization
+ * from an older way to the more common {@link org.elasticsearch.common.io.stream.NamedWriteable}.
+ *
+ * All subclasses of {@link org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction}
+ * are migrated and should include {@code getWriteableName}, {@code writeTo}, and a deserializing constructor.
+ * They should also include a {@link org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry}
+ * and it should be linked in {@link org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction}.
+ *
+ *
+ * Other functions are serialized in {@link org.elasticsearch.xpack.esql.io.stream.PlanNamedTypes}
+ * and you should copy what's done there.
+ *
*
*
* Rerun the {@code CsvTests}. They should find your function and maybe even pass. Add a
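
The hunks above apply the same pattern to every function: a registry ENTRY, a deserializing constructor, and a getWriteableName override, with writeTo and the stream constructor delegated to the shared base class. As a minimal, self-contained sketch of that NamedWriteable pattern, not part of this patch (the class name ExampleWriteable and its single string field are illustrative only), it looks roughly like this:

    import org.elasticsearch.common.io.stream.NamedWriteable;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    import java.io.IOException;

    // Illustrative sketch only: shows the registry entry, deserializing
    // constructor, writeTo, and getWriteableName that the javadoc above asks
    // each function to provide. Real ESQL functions inherit much of this from
    // their abstract base class instead of implementing it directly.
    public class ExampleWriteable implements NamedWriteable {
        // Registry entry: category class, wire name, and a reader that
        // points at the deserializing constructor below.
        public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
            NamedWriteable.class,
            "ExampleWriteable",
            ExampleWriteable::new
        );

        private final String value;

        public ExampleWriteable(String value) {
            this.value = value;
        }

        // Deserializing constructor: reads fields in the same order writeTo writes them.
        public ExampleWriteable(StreamInput in) throws IOException {
            this.value = in.readString();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeString(value);
        }

        @Override
        public String getWriteableName() {
            return ENTRY.name;
        }
    }
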
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java
index 2af1a353a39d..18046135933b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.spatial;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -35,6 +38,8 @@ import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpat
* Alternatively it is well described in PostGIS documentation at PostGIS:ST_X.
*/
public class StX extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "StX", StX::new);
+
@FunctionInfo(
returnType = "double",
description = "Extracts the `x` coordinate from the supplied point.\n"
@@ -52,6 +57,15 @@ public class StX extends UnaryScalarFunction {
super(source, field);
}
+ private StX(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected Expression.TypeResolution resolveType() {
return isSpatialPoint(field(), sourceText(), TypeResolutions.ParamOrdinal.DEFAULT);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java
index 266c5f986c52..bf97c3e2a354 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java
@@ -8,6 +8,8 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.spatial;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.ConvertEvaluator;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -35,6 +38,8 @@ import static org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions.isSpat
* Alternatively it is well described in PostGIS documentation at PostGIS:ST_Y.
*/
public class StY extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "StY", StY::new);
+
@FunctionInfo(
returnType = "double",
description = "Extracts the `y` coordinate from the supplied point.\n"
@@ -52,6 +57,15 @@ public class StY extends UnaryScalarFunction {
super(source, field);
}
+ private StY(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected TypeResolution resolveType() {
return isSpatialPoint(field(), sourceText(), TypeResolutions.ParamOrdinal.DEFAULT);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java
index e7e0b69d5149..ece70da51ef1 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -29,6 +32,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr
* Removes leading whitespaces from a string.
*/
public class LTrim extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "LTrim", LTrim::new);
+
@FunctionInfo(
returnType = { "keyword", "text" },
description = "Removes leading whitespaces from a string.",
@@ -45,6 +50,15 @@ public class LTrim extends UnaryScalarFunction {
super(source, str);
}
+ private LTrim(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java
index e2beda9612b0..241eab6d5b90 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -27,6 +30,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.Param
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString;
public class Length extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Length", Length::new);
@FunctionInfo(
returnType = "integer",
@@ -44,6 +48,15 @@ public class Length extends UnaryScalarFunction {
super(source, field);
}
+ private Length(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
public DataType dataType() {
return DataType.INTEGER;
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java
index 9edac22ca643..4c210607cfbe 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -29,6 +32,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr
* Removes trailing whitespaces from a string.
*/
public class RTrim extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "RTrim", RTrim::new);
+
@FunctionInfo(
returnType = { "keyword", "text" },
description = "Removes trailing whitespaces from a string.",
@@ -45,6 +50,15 @@ public class RTrim extends UnaryScalarFunction {
super(source, str);
}
+ private RTrim(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java
index d7d9019a7fba..36dc3d97992a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java
@@ -9,6 +9,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
@@ -20,6 +22,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.util.List;
import java.util.function.Function;
@@ -29,6 +32,8 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isStr
* Removes leading and trailing whitespaces from a string.
*/
public final class Trim extends UnaryScalarFunction {
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Trim", Trim::new);
+
@FunctionInfo(
returnType = { "keyword", "text" },
description = "Removes leading and trailing whitespaces from a string.",
@@ -45,6 +50,15 @@ public final class Trim extends UnaryScalarFunction {
super(source, str);
}
+ private Trim(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
+ }
+
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java
index 50fb5c58e200..d1ed5579c448 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
@@ -15,9 +17,9 @@ import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
-import org.elasticsearch.xpack.esql.expression.function.Warnings;
import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+import java.io.IOException;
import java.time.Duration;
import java.time.Period;
import java.util.List;
@@ -30,12 +32,19 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION;
import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.isTemporalAmount;
public class Neg extends UnaryScalarFunction {
-
- private final Warnings warnings;
+ public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Neg", Neg::new);
public Neg(Source source, Expression field) {
super(source, field);
- warnings = new Warnings(source);
+ }
+
+ public Neg(StreamInput in) throws IOException {
+ super(in);
+ }
+
+ @Override
+ public String getWriteableName() {
+ return ENTRY.name;
}
@Override
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNameRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNameRegistry.java
index d5eb5984e2e6..15368dc0fdb3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNameRegistry.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNameRegistry.java
@@ -7,6 +7,8 @@
package org.elasticsearch.xpack.esql.io.stream;
+import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@@ -80,8 +82,6 @@ public class PlanNameRegistry {
record Entry(
/** The superclass of a writeable category will be read by a reader. */
Class<?> categoryClass,
- /** The concrete class. */
- Class<?> concreteClass,
/** A name for the writeable which is unique to the categoryClass. */
String name,
/** A writer for non-NamedWriteable class */
@@ -104,7 +104,16 @@ public class PlanNameRegistry {
PlanWriter writer,
PlanReader reader
) {
- return new Entry(categoryClass, concreteClass, PlanNamedTypes.name(concreteClass), writer, reader);
+ return new Entry(categoryClass, PlanNamedTypes.name(concreteClass), writer, reader);
+ }
+
+ static <T extends NamedWriteable> Entry of(Class<T> categoryClass, NamedWriteableRegistry.Entry entry) {
+ return new Entry(
+ categoryClass,
+ entry.name,
+ (o, v) -> categoryClass.cast(v).writeTo(o),
+ in -> categoryClass.cast(entry.reader.read(in))
+ );
}
static Entry of(
@@ -113,7 +122,7 @@ public class PlanNameRegistry {
PlanWriter writer,
PlanNamedReader reader
) {
- return new Entry(categoryClass, concreteClass, PlanNamedTypes.name(concreteClass), writer, reader);
+ return new Entry(categoryClass, PlanNamedTypes.name(concreteClass), writer, reader);
}
}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
index 20d9907c61ba..795790949f66 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java
@@ -12,6 +12,7 @@ import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.io.stream.NamedWriteable;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
@@ -26,10 +27,8 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
import org.elasticsearch.xpack.esql.core.expression.Literal;
-import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
import org.elasticsearch.xpack.esql.core.expression.Order;
-import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
@@ -68,23 +67,6 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFuncti
import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case;
import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest;
import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.FromBase64;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBase64;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToRadians;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToUnsignedLong;
-import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToVersion;
import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateDiff;
import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateExtract;
import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat;
@@ -93,28 +75,12 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc;
import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now;
import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch;
import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Atan2;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cbrt;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Ceil;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cos;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.E;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Signum;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sin;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sinh;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Sqrt;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tan;
-import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tanh;
import org.elasticsearch.xpack.esql.expression.function.scalar.math.Tau;
import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.AbstractMultivalueFunction;
import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppend;
@@ -137,16 +103,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDi
import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects;
import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction;
import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialWithin;
-import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StX;
-import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StY;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.EndsWith;
-import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Left;
-import org.elasticsearch.xpack.esql.expression.function.scalar.string.Length;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Locate;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike;
-import org.elasticsearch.xpack.esql.expression.function.scalar.string.RTrim;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Repeat;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Replace;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Right;
@@ -155,13 +116,11 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith
import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper;
-import org.elasticsearch.xpack.esql.expression.function.scalar.string.Trim;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul;
-import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Neg;
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison;
@@ -211,6 +170,7 @@ import org.elasticsearch.xpack.esql.plan.physical.ShowExec;
import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -261,7 +221,7 @@ public final class PlanNamedTypes {
* Entries have the form: category, name, serializer method, deserializer method.
*/
public static List<PlanNameRegistry.Entry> namedTypeEntries() {
- return List.of(
+ List<PlanNameRegistry.Entry> declared = List.of(
// Physical Plan Nodes
of(PhysicalPlan.class, AggregateExec.class, PlanNamedTypes::writeAggregateExec, PlanNamedTypes::readAggregateExec),
of(PhysicalPlan.class, DissectExec.class, PlanNamedTypes::writeDissectExec, PlanNamedTypes::readDissectExec),
@@ -307,13 +267,6 @@ public final class PlanNamedTypes {
of(LogicalPlan.class, OrderBy.class, PlanNamedTypes::writeOrderBy, PlanNamedTypes::readOrderBy),
of(LogicalPlan.class, Project.class, PlanNamedTypes::writeProject, PlanNamedTypes::readProject),
of(LogicalPlan.class, TopN.class, PlanNamedTypes::writeTopN, PlanNamedTypes::readTopN),
- // Attributes
- of(Expression.class, FieldAttribute.class, (o, a) -> a.writeTo(o), FieldAttribute::new),
- of(Expression.class, ReferenceAttribute.class, (o, a) -> a.writeTo(o), ReferenceAttribute::new),
- of(Expression.class, MetadataAttribute.class, (o, a) -> a.writeTo(o), MetadataAttribute::new),
- of(Expression.class, UnsupportedAttribute.class, (o, a) -> a.writeTo(o), UnsupportedAttribute::new),
- // NamedExpressions
- of(Expression.class, Alias.class, (o, a) -> a.writeTo(o), Alias::new),
// BinaryComparison
of(EsqlBinaryComparison.class, Equals.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison),
of(EsqlBinaryComparison.class, NotEquals.class, PlanNamedTypes::writeBinComparison, PlanNamedTypes::readBinComparison),
@@ -340,46 +293,6 @@ public final class PlanNamedTypes {
of(QL_UNARY_SCLR_CLS, IsNotNull.class, PlanNamedTypes::writeQLUnaryScalar, PlanNamedTypes::readQLUnaryScalar),
of(QL_UNARY_SCLR_CLS, IsNull.class, PlanNamedTypes::writeQLUnaryScalar, PlanNamedTypes::readQLUnaryScalar),
of(QL_UNARY_SCLR_CLS, Not.class, PlanNamedTypes::writeQLUnaryScalar, PlanNamedTypes::readQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Neg.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Abs.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Acos.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Asin.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Atan.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Cbrt.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Ceil.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Cos.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Cosh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Floor.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, FromBase64.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Length.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Log10.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, LTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, RTrim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Signum.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Sin.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Sinh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Sqrt.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, StX.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, StY.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Tan.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Tanh.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToBase64.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToBoolean.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToCartesianPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToDatetime.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToDegrees.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToDouble.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToGeoShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToCartesianShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToGeoPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToIP.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToInteger.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToLong.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToRadians.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToString.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToUnsignedLong.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, ToVersion.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
- of(ESQL_UNARY_SCLR_CLS, Trim.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar),
// ScalarFunction
of(ScalarFunction.class, Atan2.class, PlanNamedTypes::writeAtan2, PlanNamedTypes::readAtan2),
of(ScalarFunction.class, Case.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag),
@@ -455,6 +368,18 @@ public final class PlanNamedTypes {
of(Expression.class, Literal.class, PlanNamedTypes::writeLiteral, PlanNamedTypes::readLiteral),
of(Expression.class, Order.class, PlanNamedTypes::writeOrder, PlanNamedTypes::readOrder)
);
+ List<PlanNameRegistry.Entry> entries = new ArrayList<>(declared);
+
+ // From NamedWriteables
+ for (NamedWriteableRegistry.Entry e : UnaryScalarFunction.getNamedWriteables()) {
+ entries.add(of(ESQL_UNARY_SCLR_CLS, e));
+ }
+ for (NamedWriteableRegistry.Entry e : NamedExpression.getNamedWriteables()) {
+ entries.add(of(Expression.class, e));
+ }
+ entries.add(of(Expression.class, UnsupportedAttribute.ENTRY));
+
+ return entries;
}
// -- physical plan nodes
@@ -1172,64 +1097,6 @@ public final class PlanNamedTypes {
out.writeExpression(binaryLogic.right());
}
- // -- UnaryScalarFunction
-
- static final Map<String, BiFunction<Source, Expression, UnaryScalarFunction>> ESQL_UNARY_SCALAR_CTRS = Map.ofEntries(
- entry(name(Abs.class), Abs::new),
- entry(name(Acos.class), Acos::new),
- entry(name(Asin.class), Asin::new),
- entry(name(Atan.class), Atan::new),
- entry(name(Cbrt.class), Cbrt::new),
- entry(name(Ceil.class), Ceil::new),
- entry(name(Cos.class), Cos::new),
- entry(name(Cosh.class), Cosh::new),
- entry(name(Floor.class), Floor::new),
- entry(name(FromBase64.class), FromBase64::new),
- entry(name(Length.class), Length::new),
- entry(name(Log10.class), Log10::new),
- entry(name(LTrim.class), LTrim::new),
- entry(name(RTrim.class), RTrim::new),
- entry(name(Neg.class), Neg::new),
- entry(name(Signum.class), Signum::new),
- entry(name(Sin.class), Sin::new),
- entry(name(Sinh.class), Sinh::new),
- entry(name(Sqrt.class), Sqrt::new),
- entry(name(StX.class), StX::new),
- entry(name(StY.class), StY::new),
- entry(name(Tan.class), Tan::new),
- entry(name(Tanh.class), Tanh::new),
- entry(name(ToBase64.class), ToBase64::new),
- entry(name(ToBoolean.class), ToBoolean::new),
- entry(name(ToCartesianPoint.class), ToCartesianPoint::new),
- entry(name(ToDatetime.class), ToDatetime::new),
- entry(name(ToDegrees.class), ToDegrees::new),
- entry(name(ToDouble.class), ToDouble::new),
- entry(name(ToGeoShape.class), ToGeoShape::new),
- entry(name(ToCartesianShape.class), ToCartesianShape::new),
- entry(name(ToGeoPoint.class), ToGeoPoint::new),
- entry(name(ToIP.class), ToIP::new),
- entry(name(ToInteger.class), ToInteger::new),
- entry(name(ToLong.class), ToLong::new),
- entry(name(ToRadians.class), ToRadians::new),
- entry(name(ToString.class), ToString::new),
- entry(name(ToUnsignedLong.class), ToUnsignedLong::new),
- entry(name(ToVersion.class), ToVersion::new),
- entry(name(Trim.class), Trim::new)
- );
-
- static UnaryScalarFunction readESQLUnaryScalar(PlanStreamInput in, String name) throws IOException {
- var ctr = ESQL_UNARY_SCALAR_CTRS.get(name);
- if (ctr == null) {
- throw new IOException("Constructor for ESQLUnaryScalar not found for name:" + name);
- }
- return ctr.apply(Source.readFrom(in), in.readExpression());
- }
-
- static void writeESQLUnaryScalar(PlanStreamOutput out, UnaryScalarFunction function) throws IOException {
- function.source().writeTo(out);
- out.writeExpression(function.field());
- }
-
static final Map<String, Function<Source, ScalarFunction>> NO_ARG_SCALAR_CTRS = Map.ofEntries(
entry(name(E.class), E::new),
entry(name(Pi.class), Pi::new),
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index 4fdc0bdab5ad..228ed6c5b4b3 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -197,7 +197,7 @@ public class EsqlPlugin extends Plugin implements ActionPlugin {
entries.addAll(Attribute.getNamedWriteables());
entries.add(UnsupportedAttribute.ENTRY); // TODO combine with above once these are in the same project
entries.addAll(NamedExpression.getNamedWriteables());
- entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY);
+ entries.add(UnsupportedAttribute.NAMED_EXPRESSION_ENTRY); // TODO combine with above once these are in the same project
return entries;
}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
new file mode 100644
index 000000000000..a5ce5e004b19
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractExpressionSerializationTests.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.test.AbstractWireTestCase;
+import org.elasticsearch.xpack.esql.core.expression.Attribute;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.core.type.EsField;
+import org.elasticsearch.xpack.esql.expression.function.ReferenceAttributeTests;
+import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute;
+import org.elasticsearch.xpack.esql.io.stream.PlanNameRegistry;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
+import org.elasticsearch.xpack.esql.io.stream.PlanStreamOutput;
+import org.elasticsearch.xpack.esql.session.EsqlConfiguration;
+import org.elasticsearch.xpack.esql.session.EsqlConfigurationSerializationTests;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public abstract class AbstractExpressionSerializationTests<T extends Expression> extends AbstractWireTestCase<T> {
+ public static Source randomSource() {
+ int lineNumber = between(0, EXAMPLE_QUERY.length - 1);
+ int offset = between(0, EXAMPLE_QUERY[lineNumber].length() - 2);
+ int length = between(1, EXAMPLE_QUERY[lineNumber].length() - offset - 1);
+ String text = EXAMPLE_QUERY[lineNumber].substring(offset, offset + length);
+ return new Source(lineNumber + 1, offset, text);
+ }
+
+ public static Expression randomChild() {
+ return ReferenceAttributeTests.randomReferenceAttribute();
+ }
+
+ @Override
+ protected final T copyInstance(T instance, TransportVersion version) throws IOException {
+ EsqlConfiguration config = EsqlConfigurationSerializationTests.randomConfiguration(
+ Arrays.stream(EXAMPLE_QUERY).collect(Collectors.joining("\n")),
+ Map.of()
+ );
+ return copyInstance(
+ instance,
+ getNamedWriteableRegistry(),
+ (out, v) -> new PlanStreamOutput(out, new PlanNameRegistry(), config).writeNamedWriteable(v),
+ in -> {
+ PlanStreamInput pin = new PlanStreamInput(in, new PlanNameRegistry(), in.namedWriteableRegistry(), config);
+ @SuppressWarnings("unchecked")
+ T deser = (T) pin.readNamedWriteable(Expression.class);
+ assertThat(deser.source(), equalTo(instance.source()));
+ return deser;
+ },
+ version
+ );
+ }
+
+ protected abstract List<NamedWriteableRegistry.Entry> getNamedWriteables();
+
+ @Override
+ protected final NamedWriteableRegistry getNamedWriteableRegistry() {
+ List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(NamedExpression.getNamedWriteables());
+ entries.addAll(Attribute.getNamedWriteables());
+ entries.add(UnsupportedAttribute.ENTRY);
+ entries.addAll(EsField.getNamedWriteables());
+ entries.addAll(getNamedWriteables());
+ return new NamedWriteableRegistry(entries);
+ }
+
+ private static final String[] EXAMPLE_QUERY = new String[] {
+ "I am the very model of a modern Major-Gineral,",
+ "I've information vegetable, animal, and mineral,",
+ "I know the kings of England, and I quote the fights historical",
+ "From Marathon to Waterloo, in order categorical;",
+ "I'm very well acquainted, too, with matters mathematical,",
+ "I understand equations, both the simple and quadratical,",
+ "About binomial theorem I'm teeming with a lot o' news,",
+ "With many cheerful facts about the square of the hypotenuse." };
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
new file mode 100644
index 000000000000..d8290966acbd
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/AbstractUnaryScalarSerializationTests.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction;
+
+import java.io.IOException;
+import java.util.List;
+
+public abstract class AbstractUnaryScalarSerializationTests<T extends UnaryScalarFunction> extends AbstractExpressionSerializationTests<T> {
+ protected abstract T create(Source source, Expression child);
+
+ @Override
+ protected final T createTestInstance() {
+ return create(randomSource(), randomChild());
+ }
+
+ @Override
+ protected final T mutateInstance(T instance) throws IOException {
+ Expression child = randomValueOtherThan(instance.field(), AbstractExpressionSerializationTests::randomChild);
+ return create(instance.source(), child);
+ }
+
+ @Override
+ protected List<NamedWriteableRegistry.Entry> getNamedWriteables() {
+ return UnaryScalarFunction.getNamedWriteables();
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64SerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64SerializationTests.java
new file mode 100644
index 000000000000..eee637610ffd
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/FromBase64SerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class FromBase64SerializationTests extends AbstractUnaryScalarSerializationTests<FromBase64> {
+ @Override
+ protected FromBase64 create(Source source, Expression child) {
+ return new FromBase64(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64SerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64SerializationTests.java
new file mode 100644
index 000000000000..0eebe0d74c5b
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBase64SerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToBase64SerializationTests extends AbstractUnaryScalarSerializationTests<ToBase64> {
+ @Override
+ protected ToBase64 create(Source source, Expression child) {
+ return new ToBase64(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanSerializationTests.java
new file mode 100644
index 000000000000..0f94eb46110e
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToBooleanSerializationTests extends AbstractUnaryScalarSerializationTests<ToBoolean> {
+ @Override
+ protected ToBoolean create(Source source, Expression child) {
+ return new ToBoolean(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointSerializationTests.java
new file mode 100644
index 000000000000..601320f9fbda
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToCartesianPointSerializationTests extends AbstractUnaryScalarSerializationTests<ToCartesianPoint> {
+ @Override
+ protected ToCartesianPoint create(Source source, Expression child) {
+ return new ToCartesianPoint(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeSerializationTests.java
new file mode 100644
index 000000000000..96762ca28040
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToCartesianShapeSerializationTests extends AbstractUnaryScalarSerializationTests<ToCartesianShape> {
+ @Override
+ protected ToCartesianShape create(Source source, Expression child) {
+ return new ToCartesianShape(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeSerializationTests.java
new file mode 100644
index 000000000000..935269ee76f4
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToDatetimeSerializationTests extends AbstractUnaryScalarSerializationTests<ToDatetime> {
+ @Override
+ protected ToDatetime create(Source source, Expression child) {
+ return new ToDatetime(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesSerializationTests.java
new file mode 100644
index 000000000000..fd0f1dba4bf6
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToDegreesSerializationTests extends AbstractUnaryScalarSerializationTests<ToDegrees> {
+ @Override
+ protected ToDegrees create(Source source, Expression child) {
+ return new ToDegrees(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleSerializationTests.java
new file mode 100644
index 000000000000..c2eef3b26dbb
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToDoubleSerializationTests extends AbstractUnaryScalarSerializationTests<ToDouble> {
+ @Override
+ protected ToDouble create(Source source, Expression child) {
+ return new ToDouble(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointSerializationTests.java
new file mode 100644
index 000000000000..9e210a887a17
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToGeoPointSerializationTests extends AbstractUnaryScalarSerializationTests<ToGeoPoint> {
+ @Override
+ protected ToGeoPoint create(Source source, Expression child) {
+ return new ToGeoPoint(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeSerializationTests.java
new file mode 100644
index 000000000000..71e4bc335a90
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToGeoShapeSerializationTests extends AbstractUnaryScalarSerializationTests<ToGeoShape> {
+ @Override
+ protected ToGeoShape create(Source source, Expression child) {
+ return new ToGeoShape(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPSerializationTests.java
new file mode 100644
index 000000000000..76657639a583
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToIPSerializationTests extends AbstractUnaryScalarSerializationTests<ToIP> {
+ @Override
+ protected ToIP create(Source source, Expression child) {
+ return new ToIP(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerSerializationTests.java
new file mode 100644
index 000000000000..3c8c47414b28
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToIntegerSerializationTests extends AbstractUnaryScalarSerializationTests<ToInteger> {
+ @Override
+ protected ToInteger create(Source source, Expression child) {
+ return new ToInteger(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongSerializationTests.java
new file mode 100644
index 000000000000..7acba8c04171
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToLongSerializationTests extends AbstractUnaryScalarSerializationTests<ToLong> {
+ @Override
+ protected ToLong create(Source source, Expression child) {
+ return new ToLong(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansSerializationTests.java
new file mode 100644
index 000000000000..396feb6d13a9
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToRadiansSerializationTests extends AbstractUnaryScalarSerializationTests<ToRadians> {
+ @Override
+ protected ToRadians create(Source source, Expression child) {
+ return new ToRadians(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringSerializationTests.java
new file mode 100644
index 000000000000..08bfa106cbd9
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToStringSerializationTests extends AbstractUnaryScalarSerializationTests<ToString> {
+ @Override
+ protected ToString create(Source source, Expression child) {
+ return new ToString(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongSerializationTests.java
new file mode 100644
index 000000000000..3e58e8d4f4ad
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToUnsignedLongSerializationTests extends AbstractUnaryScalarSerializationTests<ToUnsignedLong> {
+ @Override
+ protected ToUnsignedLong create(Source source, Expression child) {
+ return new ToUnsignedLong(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionSerializationTests.java
new file mode 100644
index 000000000000..62548212d843
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class ToVersionSerializationTests extends AbstractUnaryScalarSerializationTests<ToVersion> {
+ @Override
+ protected ToVersion create(Source source, Expression child) {
+ return new ToVersion(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsSerializationTests.java
new file mode 100644
index 000000000000..fd447c34d3fa
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class AbsSerializationTests extends AbstractUnaryScalarSerializationTests<Abs> {
+ @Override
+ protected Abs create(Source source, Expression child) {
+ return new Abs(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosSerializationTests.java
new file mode 100644
index 000000000000..d980fa95c3b9
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class AcosSerializationTests extends AbstractUnaryScalarSerializationTests<Acos> {
+ @Override
+ protected Acos create(Source source, Expression child) {
+ return new Acos(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinSerializationTests.java
new file mode 100644
index 000000000000..09000388c553
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class AsinSerializationTests extends AbstractUnaryScalarSerializationTests<Asin> {
+ @Override
+ protected Asin create(Source source, Expression child) {
+ return new Asin(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanSerializationTests.java
new file mode 100644
index 000000000000..2176f06c82a1
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class AtanSerializationTests extends AbstractUnaryScalarSerializationTests<Atan> {
+ @Override
+ protected Atan create(Source source, Expression child) {
+ return new Atan(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtSerializationTests.java
new file mode 100644
index 000000000000..294dd1d378d2
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CbrtSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class CbrtSerializationTests extends AbstractUnaryScalarSerializationTests<Cbrt> {
+ @Override
+ protected Cbrt create(Source source, Expression child) {
+ return new Cbrt(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilSerializationTests.java
new file mode 100644
index 000000000000..7105a44ed9a0
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class CeilSerializationTests extends AbstractUnaryScalarSerializationTests<Ceil> {
+ @Override
+ protected Ceil create(Source source, Expression child) {
+ return new Ceil(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosSerializationTests.java
new file mode 100644
index 000000000000..0be0c411ebea
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class CosSerializationTests extends AbstractUnaryScalarSerializationTests<Cos> {
+ @Override
+ protected Cos create(Source source, Expression child) {
+ return new Cos(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshSerializationTests.java
new file mode 100644
index 000000000000..cb8ee99869c2
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class CoshSerializationTests extends AbstractUnaryScalarSerializationTests<Cosh> {
+ @Override
+ protected Cosh create(Source source, Expression child) {
+ return new Cosh(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorSerializationTests.java
new file mode 100644
index 000000000000..48fdd68e8690
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class FloorSerializationTests extends AbstractUnaryScalarSerializationTests<Floor> {
+ @Override
+ protected Floor create(Source source, Expression child) {
+ return new Floor(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10SerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10SerializationTests.java
new file mode 100644
index 000000000000..2b79bbeb8a9c
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10SerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class Log10SerializationTests extends AbstractUnaryScalarSerializationTests<Log10> {
+ @Override
+ protected Log10 create(Source source, Expression child) {
+ return new Log10(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinSerializationTests.java
new file mode 100644
index 000000000000..c9118fceaf5f
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class SinSerializationTests extends AbstractUnaryScalarSerializationTests<Sin> {
+ @Override
+ protected Sin create(Source source, Expression child) {
+ return new Sin(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhSerializationTests.java
new file mode 100644
index 000000000000..c87e41ef3fbb
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class SinhSerializationTests extends AbstractUnaryScalarSerializationTests<Sinh> {
+ @Override
+ protected Sinh create(Source source, Expression child) {
+ return new Sinh(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtSerializationTests.java
new file mode 100644
index 000000000000..526f50eaa4d2
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class SqrtSerializationTests extends AbstractUnaryScalarSerializationTests<Sqrt> {
+ @Override
+ protected Sqrt create(Source source, Expression child) {
+ return new Sqrt(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanSerializationTests.java
new file mode 100644
index 000000000000..9c1a0a9f514c
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class TanSerializationTests extends AbstractUnaryScalarSerializationTests<Tan> {
+ @Override
+ protected Tan create(Source source, Expression child) {
+ return new Tan(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhSerializationTests.java
new file mode 100644
index 000000000000..3899ad34851e
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.function.scalar.math;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class TanhSerializationTests extends AbstractUnaryScalarSerializationTests<Tanh> {
+ @Override
+ protected Tanh create(Source source, Expression child) {
+ return new Tanh(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegSerializationTests.java
new file mode 100644
index 000000000000..241958f12d69
--- /dev/null
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegSerializationTests.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic;
+
+import org.elasticsearch.xpack.esql.core.expression.Expression;
+import org.elasticsearch.xpack.esql.core.tree.Source;
+import org.elasticsearch.xpack.esql.expression.AbstractUnaryScalarSerializationTests;
+
+public class NegSerializationTests extends AbstractUnaryScalarSerializationTests<Neg> {
+ @Override
+ protected Neg create(Source source, Expression child) {
+ return new Neg(source, child);
+ }
+}
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java
index b45fcd272439..2278be659c53 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java
@@ -194,7 +194,7 @@ public class PlanNamedTypesTests extends ESTestCase {
public void testFunctionEntries() {
var serializableFunctions = PlanNamedTypes.namedTypeEntries()
.stream()
- .filter(e -> Function.class.isAssignableFrom(e.concreteClass()))
+ .filter(e -> Function.class.isAssignableFrom(e.categoryClass()))
.map(PlanNameRegistry.Entry::name)
.sorted()
.toList();
From bf7108752ec8f7b71e843338a41136828fb4b6e7 Mon Sep 17 00:00:00 2001
From: Rene Groeschke
Date: Tue, 11 Jun 2024 23:14:44 +0200
Subject: [PATCH 21/34] Do not auto publish build scans if server is not
configured (#109549)
---
.../src/main/groovy/elasticsearch.build-scan.gradle | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
index f0a7b1a6d0b1..c6930c2263ec 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle
@@ -26,6 +26,10 @@ develocity {
if (jenkinsUrl?.host?.endsWith('elastic.co') || jenkinsUrl?.host?.endsWith('elastic.dev') || System.getenv('BUILDKITE') == 'true') {
publishing.onlyIf { true }
server = 'https://gradle-enterprise.elastic.co'
+ } else {
+ publishing.onlyIf {
+ server.isPresent();
+ }
}
From 429b539e5cb5f59a132258e0ed82600836569846 Mon Sep 17 00:00:00 2001
From: Dianna Hohensee
Date: Tue, 11 Jun 2024 19:41:09 -0400
Subject: [PATCH 22/34] Document backwards compatibility in arch guide
(#109589)
Transferring our slack conversation to a more widely consumable format.
---
docs/internal/GeneralArchitectureGuide.md | 46 +++++++++++++++++++++++
1 file changed, 46 insertions(+)
diff --git a/docs/internal/GeneralArchitectureGuide.md b/docs/internal/GeneralArchitectureGuide.md
index a2dadb70bf97..b4f32d73f165 100644
--- a/docs/internal/GeneralArchitectureGuide.md
+++ b/docs/internal/GeneralArchitectureGuide.md
@@ -68,6 +68,52 @@ state must ever be reloaded from persisted state.
## Deprecations
+## Backwards Compatibility
+
+Major releases are mostly about breaking compatibility and dropping deprecated functionality.
+
+Elasticsearch versions are composed of three pieces of information: the major version, the minor version, and the patch version,
+in that order (major.minor.patch). Patch releases are typically bug fixes; minor releases contain improvements / new features;
+and major releases essentially break compatibility and enable removal of deprecated functionality. As an example, each of 8.0.0,
+8.3.0 and 8.3.1 specifies an exact release version. They all have the same major version (8) and the last two have the same minor
+version (8.3). Multiversion compatibility within a cluster, or backwards compatibility with older version nodes, is only
+guaranteed across the specific version combinations described below.
+
+### Transport Layer Backwards Compatibility
+
+Elasticsearch nodes can communicate over the network with all node versions within the same major release. All versions within
+one major version X are also compatible with the last minor version releases of the previous major version, i.e. (X-1).last.
+More concretely, all 8.x.x version nodes can communicate with all 7.17.x version nodes.
+
+### Index Format Backwards Compatibility
+
+Index data format backwards compatibility is guaranteed with all versions of the previous major release. All 8.x.x version nodes,
+for example, can read index data written by any 7.x.x version node. 9.x.x versions, however, will not be able to read 7.x.x format
+data files.
+
+Elasticsearch does not have an upgrade process to convert from older to newer index data formats. The user is expected to run
+`reindex` on any remaining untouched data from a previous version upgrade before upgrading to the next version. There is a good
+chance that older version index data will age out and be deleted before the user does the next upgrade, but `reindex` can be used
+if that is not the case.
+
+### Snapshot Backwards Compatibility
+
+Snapshots taken by a cluster of version X cannot be read by a cluster running older version nodes. However, snapshots taken by an
+older version cluster can continue to be read from and written to by newer version clusters: this compatibility goes back many
+major versions. If a newer version cluster writes to a snapshot repository containing snapshots from an older version, then it
+will do so in a way that leaves the repository format (metadata and file layout) readable by those older versions.
+
+Restoring indices whose data formats are no longer supported can be tricky: see the
+[public snapshot compatibility docs][] for details.
+
+[public snapshot compatibility docs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/snapshot-restore.html#snapshot-index-compatibility
+
+### Upgrade
+
+See the [public upgrade docs][] for the upgrade process.
+
+[public upgrade docs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-upgrade.html
+
## Plugins
(what warrants a plugin?)
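The backwards-compatibility rules added in the guide above can be illustrated with a small sketch. The `Version` record and `wireCompatible` helper below are hypothetical (they are not Elasticsearch's real `Version` class); they only encode the major.minor.patch scheme and the transport rule described in the guide, namely that nodes talk to the same major plus the last minor of the previous major (7.17 being the known example for 8.x):

```java
// Hypothetical illustration of the compatibility rules in the guide above;
// not Elasticsearch's actual Version implementation.
public class VersionCompatSketch {

    record Version(int major, int minor, int patch) {}

    // Wire compatibility: same major, or the peer is on the last minor of the
    // previous major (7.17 is the known example for 8.x nodes).
    static boolean wireCompatible(Version local, Version peer, int previousMajorLastMinor) {
        if (local.major() == peer.major()) {
            return true;
        }
        return peer.major() == local.major() - 1 && peer.minor() == previousMajorLastMinor;
    }

    public static void main(String[] args) {
        Version local = new Version(8, 3, 1);
        System.out.println(wireCompatible(local, new Version(7, 17, 22), 17)); // true
        System.out.println(wireCompatible(local, new Version(7, 10, 0), 17));  // false
    }
}
```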
From 696191d1c746d22511d88a71329e1c3abf98bbbf Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Wed, 12 Jun 2024 04:06:41 +0200
Subject: [PATCH 23/34] Fix testMapperDynamicIndexSetting() failures (#109574)
Currently these tests run against any old cluster older than 8.0.0, but
the fix that allowed `index.mapper.dynamic` to exist is only available
in 7.17.22.
Adjust these tests to run only if the old cluster version is after 7.17.21
and before 8.0.0.
---
.../upgrades/UpgradeWithOldIndexSettingsIT.java | 5 +++--
.../upgrades/UpgradeWithOldIndexSettingsIT.java | 5 +++--
2 files changed, 6 insertions(+), 4 deletions(-)
diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
index 95178429317b..3a2200f15276 100644
--- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
+++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
@@ -50,8 +50,9 @@ public class UpgradeWithOldIndexSettingsIT extends ParameterizedFullClusterResta
public void testMapperDynamicIndexSetting() throws IOException {
assumeTrue(
- "Setting deprecated in 6.x, but remained in 7.x and is no longer defined in 8.x",
- getOldClusterTestVersion().before("8.0.0")
+ "Setting deprecated in 6.x, but was disallowed/removed incorrectly in some 7.x versions and can only be set safely in 7.17.22. "
+ + "Setting can't be used in 8.x ",
+ getOldClusterTestVersion().before("8.0.0") && getOldClusterTestVersion().after("7.17.21")
);
String indexName = "my-index";
if (isRunningAgainstOldCluster()) {
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
index ba873ef6bbd7..ae75069fa564 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java
@@ -106,8 +106,9 @@ public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCas
public void testMapperDynamicIndexSetting() throws IOException {
assumeTrue(
- "Setting deprecated in 6.x, but remained in 7.x and is no longer defined in 8.x",
- getOldClusterTestVersion().before("8.0.0")
+ "Setting deprecated in 6.x, but was disallowed/removed incorrectly in some 7.x versions and can only be set safely in 7.17.22. "
+ + "Setting can't be used in 8.x ",
+ getOldClusterTestVersion().before("8.0.0") && getOldClusterTestVersion().after("7.17.21")
);
String indexName = "my-index";
if (isOldCluster()) {
From 75617523e3ee62a051c086e9e0d0e1502ce5bf93 Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Wed, 12 Jun 2024 13:24:26 +1000
Subject: [PATCH 24/34] [Test] Force close of HTTP response stream (#109558)
In JDK 23 the `HttpServer` requires that the HTTP response be explicitly
closed (even if there is no response body).
Resolves: #109452
---
.../xpack/security/authc/saml/SamlServiceProviderMetadataIT.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlServiceProviderMetadataIT.java b/x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlServiceProviderMetadataIT.java
index 383598c804f7..9d2168267bb8 100644
--- a/x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlServiceProviderMetadataIT.java
+++ b/x-pack/plugin/security/qa/saml-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlServiceProviderMetadataIT.java
@@ -144,6 +144,7 @@ public class SamlServiceProviderMetadataIT extends ESRestTestCase {
} else {
if (randomBoolean()) {
http.sendResponseHeaders(randomFrom(404, 401, 403, 500), 0);
+ http.getResponseBody().close();
} else {
sendXmlContent("not valid xml", http);
}
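For context on the one-line fix above: with the JDK's built-in `com.sun.net.httpserver` server, an exchange is only correctly terminated once its response body stream is closed, and newer JDKs (the commit cites JDK 23) enforce this even when no body is written. A minimal, self-contained sketch of that handler shape (hypothetical context path and status code, not the test's actual code):

```java
import com.sun.net.httpserver.HttpServer;
import java.io.IOException;
import java.net.InetSocketAddress;

public class EmptyResponseSketch {
    public static void main(String[] args) throws IOException {
        HttpServer server = HttpServer.create(new InetSocketAddress(0), 0);
        server.createContext("/metadata", exchange -> {
            // Send headers with a 0 length (arbitrary-length body allowed), write no
            // body, then close the response stream explicitly to complete the exchange.
            exchange.sendResponseHeaders(404, 0);
            exchange.getResponseBody().close();
        });
        server.start();
        System.out.println("listening on port " + server.getAddress().getPort());
        server.stop(0); // stop immediately; this sketch only demonstrates the handler
    }
}
```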
From b99b5d5f253bc0f91783aa52f79d23443a8205ce Mon Sep 17 00:00:00 2001
From: David Turner
Date: Wed, 12 Jun 2024 06:40:00 +0100
Subject: [PATCH 25/34] Remove unused seek-tracking plugin (#109600)
This was used for some performance investigations but is not currently
needed, and would need updating in order to complete #100878. Instead,
this commit removes it.
---
.../internal/RestrictedBuildApiService.java | 1 -
.../seek-tracking-directory/build.gradle | 14 -
.../test/seektracker/SeekTrackerPluginIT.java | 55 ----
.../test/seektracker/IndexSeekTracker.java | 51 ----
.../test/seektracker/NodeSeekStats.java | 52 ----
.../test/seektracker/RestSeekStatsAction.java | 41 ---
.../test/seektracker/SeekStatsRequest.java | 42 ---
.../test/seektracker/SeekStatsResponse.java | 69 -----
.../test/seektracker/SeekStatsService.java | 30 --
.../test/seektracker/SeekTrackerPlugin.java | 100 -------
.../SeekTrackingDirectoryWrapper.java | 269 ------------------
.../test/seektracker/ShardSeekStats.java | 36 ---
.../seektracker/TransportSeekStatsAction.java | 83 ------
13 files changed, 843 deletions(-)
delete mode 100644 test/external-modules/seek-tracking-directory/build.gradle
delete mode 100644 test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/IndexSeekTracker.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/NodeSeekStats.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/RestSeekStatsAction.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsRequest.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsResponse.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsService.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/ShardSeekStats.java
delete mode 100644 test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/TransportSeekStatsAction.java
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
index 23afcab7bec7..9cc98e79183c 100644
--- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
+++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/RestrictedBuildApiService.java
@@ -71,7 +71,6 @@ public abstract class RestrictedBuildApiService implements BuildService> nodePlugins() {
- return List.of(SeekTrackerPlugin.class);
- }
-
- @Override
- protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
- return Settings.builder()
- .put(super.nodeSettings(nodeOrdinal, otherSettings))
- .put(SeekTrackerPlugin.SEEK_TRACKING_ENABLED.getKey(), "true")
- .build();
- }
-
- public void testSeekTrackerPlugin() throws InterruptedException {
-
- assertAcked(indicesAdmin().prepareCreate("index"));
- List<IndexRequestBuilder> docs = new ArrayList<>();
- for (int i = 0; i < 100; i++) {
- docs.add(prepareIndex("index").setSource("field", "term" + i % 5));
- }
- indexRandom(true, docs);
-
- prepareSearch("index").setQuery(QueryBuilders.termQuery("field", "term2")).get().decRef();
-
- SeekStatsResponse response = client().execute(SeekTrackerPlugin.SEEK_STATS_ACTION, new SeekStatsRequest("index")).actionGet();
- List<ShardSeekStats> shardSeekStats = response.getSeekStats().get("index");
- assertThat(shardSeekStats.size(), greaterThan(0));
- }
-
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/IndexSeekTracker.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/IndexSeekTracker.java
deleted file mode 100644
index 2cb3fa4bbe6a..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/IndexSeekTracker.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.LongAdder;
-
-public class IndexSeekTracker {
-
- private final String index;
- private final Map<String, Map<String, LongAdder>> seeks = new HashMap<>();
-
- public IndexSeekTracker(String index) {
- this.index = index;
- }
-
- public void track(String shard) {
- seeks.computeIfAbsent(shard, k -> new ConcurrentHashMap<>()); // increment can be called by multiple threads
- }
-
- public void increment(String shard, String file) {
- seeks.get(shard).computeIfAbsent(file, s -> new LongAdder()).increment();
- }
-
- public List<ShardSeekStats> getSeeks() {
- List<ShardSeekStats> values = new ArrayList<>();
- seeks.forEach((k, v) -> values.add(getSeeksForShard(k)));
- return values;
- }
-
- private ShardSeekStats getSeeksForShard(String shard) {
- Map<String, Long> seeksPerFile = new HashMap<>();
- seeks.get(shard).forEach((name, adder) -> seeksPerFile.put(name, adder.longValue()));
- return new ShardSeekStats(shard, seeksPerFile);
- }
-
- @Override
- public String toString() {
- return "seeks for " + index + ": " + seeks;
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/NodeSeekStats.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/NodeSeekStats.java
deleted file mode 100644
index 8b2d95c3cf57..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/NodeSeekStats.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.action.support.nodes.BaseNodeResponse;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentFragment;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-public class NodeSeekStats extends BaseNodeResponse implements ToXContentFragment {
-
- private final Map<String, List<ShardSeekStats>> seeks;
-
- public NodeSeekStats(DiscoveryNode node, Map<String, List<ShardSeekStats>> seeks) {
- super(node);
- this.seeks = seeks;
- }
-
- public NodeSeekStats(StreamInput in) throws IOException {
- super(in);
- this.seeks = in.readMap(s -> s.readCollectionAsList(ShardSeekStats::new));
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- super.writeTo(out);
- out.writeMap(seeks, StreamOutput::writeCollection);
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.mapContents(seeks);
- return builder;
- }
-
- public Map<String, List<ShardSeekStats>> getSeekStats() {
- return seeks;
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/RestSeekStatsAction.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/RestSeekStatsAction.java
deleted file mode 100644
index 8695a08ce06a..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/RestSeekStatsAction.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.rest.BaseRestHandler;
-import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.rest.action.RestToXContentListener;
-
-import java.util.List;
-
-public class RestSeekStatsAction extends BaseRestHandler {
-
- @Override
- public String getName() {
- return "seek_stats_action";
- }
-
- @Override
- public List<RestHandler.Route> routes() {
- return List.of(
- new RestHandler.Route(RestRequest.Method.GET, "/_seek_stats"),
- new RestHandler.Route(RestRequest.Method.GET, "/{index}/_seek_stats")
- );
- }
-
- @Override
- protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
- String[] indices = request.paramAsStringArray("index", Strings.EMPTY_ARRAY);
- SeekStatsRequest seekStatsRequest = new SeekStatsRequest(indices);
- return channel -> client.execute(SeekTrackerPlugin.SEEK_STATS_ACTION, seekStatsRequest, new RestToXContentListener<>(channel));
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsRequest.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsRequest.java
deleted file mode 100644
index 86dc35cc3cd4..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsRequest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.action.support.nodes.BaseNodesRequest;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-
-import java.io.IOException;
-
- public class SeekStatsRequest extends BaseNodesRequest<SeekStatsRequest> {
-
- private final String[] indices;
-
- public SeekStatsRequest(String... indices) {
- super(Strings.EMPTY_ARRAY);
- this.indices = indices;
- }
-
- public SeekStatsRequest(StreamInput in) throws IOException {
- super(in);
- this.indices = in.readStringArray();
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- super.writeTo(out);
- out.writeStringArray(indices);
- }
-
- public String[] getIndices() {
- return indices;
- }
-
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsResponse.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsResponse.java
deleted file mode 100644
index 27c28345091e..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsResponse.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.action.FailedNodeException;
-import org.elasticsearch.action.support.TransportAction;
-import org.elasticsearch.action.support.nodes.BaseNodesResponse;
-import org.elasticsearch.cluster.ClusterName;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
- public class SeekStatsResponse extends BaseNodesResponse<NodeSeekStats> implements ToXContentObject {
-
- public SeekStatsResponse(ClusterName clusterName, List<NodeSeekStats> seekStats, List<FailedNodeException> failures) {
- super(clusterName, seekStats, failures);
- }
-
- @Override
- protected List<NodeSeekStats> readNodesFrom(StreamInput in) {
- return TransportAction.localOnly();
- }
-
- @Override
- protected void writeNodesTo(StreamOutput out, List<NodeSeekStats> nodes) {
- TransportAction.localOnly();
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject();
- for (NodeSeekStats seekStats : getNodes()) {
- builder.startObject(seekStats.getNode().getId());
- seekStats.toXContent(builder, params);
- builder.endObject();
- }
- builder.endObject();
- return builder;
- }
-
- public Map<String, List<ShardSeekStats>> getSeekStats() {
- Map<String, List<ShardSeekStats>> combined = new HashMap<>();
- for (NodeSeekStats nodeSeekStats : getNodes()) {
- nodeSeekStats.getSeekStats()
- .forEach((index, shardSeekStats) -> combined.computeIfAbsent(index, k -> new ArrayList<>()).addAll(shardSeekStats));
- }
- return combined;
- }
-
- @Override
- public String toString() {
- return Strings.toString(this);
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsService.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsService.java
deleted file mode 100644
index d98d87ab87ff..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekStatsService.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class SeekStatsService {
-
- private final Map<String, IndexSeekTracker> seeks = new HashMap<>();
-
- public IndexSeekTracker registerIndex(String index) {
- return seeks.computeIfAbsent(index, IndexSeekTracker::new);
- }
-
- public Map<String, IndexSeekTracker> getSeekStats() {
- return seeks;
- }
-
- public IndexSeekTracker getSeekStats(String index) {
- return seeks.get(index);
- }
-
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
deleted file mode 100644
index 54ef53b8969e..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.ActionType;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.settings.ClusterSettings;
-import org.elasticsearch.common.settings.IndexScopedSettings;
-import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.features.NodeFeature;
-import org.elasticsearch.index.IndexModule;
-import org.elasticsearch.plugins.ActionPlugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.rest.RestController;
-import org.elasticsearch.rest.RestHandler;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.function.Predicate;
-import java.util.function.Supplier;
-
-public class SeekTrackerPlugin extends Plugin implements ActionPlugin {
-
- /** Setting for enabling or disabling seek tracking. Defaults to false. */
- public static final Setting<Boolean> SEEK_TRACKING_ENABLED = Setting.boolSetting(
- "seektracker.enabled",
- false,
- Setting.Property.NodeScope
- );
-
- public static final ActionType<SeekStatsResponse> SEEK_STATS_ACTION = new ActionType<>("cluster:monitor/seek_stats");
-
- private final SeekStatsService seekStatsService = new SeekStatsService();
- private final boolean enabled;
-
- public SeekTrackerPlugin(Settings settings) {
- this.enabled = SEEK_TRACKING_ENABLED.get(settings);
- }
-
- @Override
- public List<Setting<?>> getSettings() {
- return List.of(SEEK_TRACKING_ENABLED);
- }
-
- @Override
- public Collection<?> createComponents(PluginServices services) {
- return Collections.singletonList(seekStatsService);
- }
-
- // seeks per index/shard/file
-
- @Override
- public void onIndexModule(IndexModule indexModule) {
- if (enabled) {
- IndexSeekTracker seekTracker = seekStatsService.registerIndex(indexModule.getIndex().getName());
- indexModule.setDirectoryWrapper(new SeekTrackingDirectoryWrapper(seekTracker));
- }
- }
-
- @Override
- public List<RestHandler> getRestHandlers(
- Settings settings,
- NamedWriteableRegistry namedWriteableRegistry,
- RestController restController,
- ClusterSettings clusterSettings,
- IndexScopedSettings indexScopedSettings,
- SettingsFilter settingsFilter,
- IndexNameExpressionResolver indexNameExpressionResolver,
- Supplier<DiscoveryNodes> nodesInCluster,
- Predicate<NodeFeature> clusterSupportsFeature
- ) {
- if (enabled) {
- return Collections.singletonList(new RestSeekStatsAction());
- } else {
- return Collections.emptyList();
- }
- }
-
- @Override
- public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
- if (enabled) {
- return Collections.singletonList(new ActionHandler<>(SEEK_STATS_ACTION, TransportSeekStatsAction.class));
- } else {
- return Collections.emptyList();
- }
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java
deleted file mode 100644
index 9b3d31022c58..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackingDirectoryWrapper.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FilterDirectory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.RandomAccessInput;
-import org.elasticsearch.cluster.routing.ShardRouting;
-import org.elasticsearch.index.IndexModule;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Set;
-
-public class SeekTrackingDirectoryWrapper implements IndexModule.DirectoryWrapper {
-
- private final IndexSeekTracker seekTracker;
-
- public SeekTrackingDirectoryWrapper(IndexSeekTracker seekTracker) {
- this.seekTracker = seekTracker;
- }
-
- @Override
- public Directory wrap(Directory directory, ShardRouting shardRouting) {
- seekTracker.track(shardRouting.shardId().toString());
- return new FilterDirectory(directory) {
- @Override
- public IndexInput openInput(String name, IOContext context) throws IOException {
- IndexInput input = super.openInput(name, context);
- if (input instanceof RandomAccessInput) {
- return new RandomAccessSeekCountingIndexInput(input, shardRouting.shardId().toString(), name);
- }
- return wrapIndexInput(shardRouting.shardId().toString(), name, input);
- }
- };
- }
-
- private IndexInput wrapIndexInput(String directory, String name, IndexInput in) {
- return new SeekCountingIndexInput(in, directory, name);
- }
-
- class RandomAccessSeekCountingIndexInput extends SeekCountingIndexInput implements RandomAccessInput {
-
- private final RandomAccessInput randomAccessInput;
-
- RandomAccessSeekCountingIndexInput(IndexInput in, String directory, String name) {
- super(in, directory, name);
- randomAccessInput = (RandomAccessInput) unwrap(in);
- }
-
- @Override
- public IndexInput clone() {
- return new RandomAccessSeekCountingIndexInput(super.clone(), directory, name);
- }
-
- @Override
- public byte readByte(long pos) throws IOException {
- return randomAccessInput.readByte(pos);
- }
-
- @Override
- public short readShort(long pos) throws IOException {
- return randomAccessInput.readShort(pos);
- }
-
- @Override
- public int readInt(long pos) throws IOException {
- return randomAccessInput.readInt(pos);
- }
-
- @Override
- public long readLong(long pos) throws IOException {
- return randomAccessInput.readLong(pos);
- }
- }
-
- class SeekCountingIndexInput extends IndexInput {
-
- public static IndexInput unwrap(IndexInput input) {
- while (input instanceof SeekCountingIndexInput) {
- input = ((SeekCountingIndexInput) input).in;
- }
- return input;
- }
-
- final IndexInput in;
- final String directory;
- final String name;
-
- SeekCountingIndexInput(IndexInput in, String directory, String name) {
- super(unwrap(in).toString() + "[seek_tracked]");
- this.in = unwrap(in);
- this.directory = directory;
- this.name = name;
- }
-
- @Override
- public IndexInput clone() {
- return new SeekCountingIndexInput(in.clone(), directory, name);
- }
-
- @Override
- public void close() throws IOException {
- in.close();
- }
-
- @Override
- public long getFilePointer() {
- return in.getFilePointer();
- }
-
- @Override
- public void seek(long pos) throws IOException {
- in.seek(pos);
- seekTracker.increment(directory, name);
- }
-
- @Override
- public long length() {
- return in.length();
- }
-
- @Override
- public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
- return wrapIndexInput(directory, name, in.slice(sliceDescription + "[seek_tracked]", offset, length));
- }
-
- @Override
- public RandomAccessInput randomAccessSlice(long offset, long length) throws IOException {
- final IndexInput innerSlice = in.slice("randomaccess", offset, length);
- if (innerSlice instanceof RandomAccessInput) {
- // slice() already supports random access
- return new RandomAccessSeekCountingIndexInput(innerSlice, directory, name);
- } else {
- IndexInput slice = wrapIndexInput(directory, name, innerSlice);
- // return default impl
- return new RandomAccessInput() {
- @Override
- public long length() {
- return slice.length();
- }
-
- @Override
- public byte readByte(long pos) throws IOException {
- slice.seek(pos);
- return slice.readByte();
- }
-
- @Override
- public short readShort(long pos) throws IOException {
- slice.seek(pos);
- return slice.readShort();
- }
-
- @Override
- public int readInt(long pos) throws IOException {
- slice.seek(pos);
- return slice.readInt();
- }
-
- @Override
- public long readLong(long pos) throws IOException {
- slice.seek(pos);
- return slice.readLong();
- }
-
- @Override
- public String toString() {
- return "RandomAccessInput(" + slice + ")";
- }
- };
- }
- }
-
- @Override
- public byte readByte() throws IOException {
- return in.readByte();
- }
-
- @Override
- public void readBytes(byte[] b, int offset, int len) throws IOException {
- in.readBytes(b, offset, len);
- }
-
- @Override
- public void readBytes(byte[] b, int offset, int len, boolean useBuffer) throws IOException {
- in.readBytes(b, offset, len, useBuffer);
- }
-
- @Override
- public short readShort() throws IOException {
- return in.readShort();
- }
-
- @Override
- public int readInt() throws IOException {
- return in.readInt();
- }
-
- @Override
- public int readVInt() throws IOException {
- return in.readVInt();
- }
-
- @Override
- public int readZInt() throws IOException {
- return in.readZInt();
- }
-
- @Override
- public long readLong() throws IOException {
- return in.readLong();
- }
-
- @Override
- public long readVLong() throws IOException {
- return in.readVLong();
- }
-
- @Override
- public long readZLong() throws IOException {
- return in.readZLong();
- }
-
- @Override
- public String readString() throws IOException {
- return in.readString();
- }
-
- @Override
- public Map<String, String> readMapOfStrings() throws IOException {
- return in.readMapOfStrings();
- }
-
- @Override
- public Set<String> readSetOfStrings() throws IOException {
- return in.readSetOfStrings();
- }
-
- @Override
- public void skipBytes(long numBytes) throws IOException {
- in.skipBytes(numBytes);
- }
-
- @Override
- public void readFloats(float[] floats, int offset, int len) throws IOException {
- in.readFloats(floats, offset, len);
- }
-
- @Override
- public void readLongs(long[] dst, int offset, int length) throws IOException {
- in.readLongs(dst, offset, length);
- }
-
- @Override
- public void readInts(int[] dst, int offset, int length) throws IOException {
- in.readInts(dst, offset, length);
- }
-
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/ShardSeekStats.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/ShardSeekStats.java
deleted file mode 100644
index 1f904c0807fb..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/ShardSeekStats.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Writeable;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Map;
-
-public record ShardSeekStats(String shard, Map<String, Long> seeksPerFile) implements Writeable, ToXContentObject {
-
- public ShardSeekStats(StreamInput in) throws IOException {
- this(in.readString(), in.readMap(StreamInput::readLong));
- }
-
- @Override
- public void writeTo(StreamOutput out) throws IOException {
- out.writeString(this.shard);
- out.writeMap(this.seeksPerFile, StreamOutput::writeLong);
- }
-
- @Override
- public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
- return builder.startObject().field("shard", this.shard).field("seeks", seeksPerFile).endObject();
- }
-}
diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/TransportSeekStatsAction.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/TransportSeekStatsAction.java
deleted file mode 100644
index bd1c35302b04..000000000000
--- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/TransportSeekStatsAction.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.test.seektracker;
-
-import org.elasticsearch.action.FailedNodeException;
-import org.elasticsearch.action.support.ActionFilters;
-import org.elasticsearch.action.support.nodes.TransportNodesAction;
-import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.tasks.Task;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.transport.TransportService;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class TransportSeekStatsAction extends TransportNodesAction<SeekStatsRequest, SeekStatsResponse, SeekStatsRequest, NodeSeekStats> {
-
- private final SeekStatsService seekStatsService;
-
- @Inject
- public TransportSeekStatsAction(
- ThreadPool threadPool,
- ClusterService clusterService,
- TransportService transportService,
- ActionFilters actionFilters,
- SeekStatsService seekStatsService
- ) {
- super(
- SeekTrackerPlugin.SEEK_STATS_ACTION.name(),
- clusterService,
- transportService,
- actionFilters,
- SeekStatsRequest::new,
- threadPool.executor(ThreadPool.Names.MANAGEMENT)
- );
- this.seekStatsService = seekStatsService;
- }
-
- @Override
- protected SeekStatsResponse newResponse(SeekStatsRequest request, List<NodeSeekStats> seekStats, List<FailedNodeException> failures) {
- return new SeekStatsResponse(clusterService.getClusterName(), seekStats, failures);
- }
-
- @Override
- protected SeekStatsRequest newNodeRequest(SeekStatsRequest request) {
- // TODO don't wrap the whole top-level request, it contains heavy and irrelevant DiscoveryNode things; see #100878
- return request;
- }
-
- @Override
- protected NodeSeekStats newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
- return new NodeSeekStats(in);
- }
-
- @Override
- protected NodeSeekStats nodeOperation(SeekStatsRequest request, Task task) {
- Map> seeks = new HashMap<>();
- if (request.getIndices().length == 0) {
- for (Map.Entry<String, IndexSeekTracker> entry : seekStatsService.getSeekStats().entrySet()) {
- seeks.put(entry.getKey(), entry.getValue().getSeeks());
- }
- } else {
- for (String index : request.getIndices()) {
- IndexSeekTracker indexSeekTracker = seekStatsService.getSeekStats(index);
- if (indexSeekTracker != null) {
- seeks.put(index, indexSeekTracker.getSeeks());
- }
- }
- }
- return new NodeSeekStats(clusterService.localNode(), seeks);
- }
-}
From 9abfcf2ada522910b10b4d5e2056d6dc635a1cb8 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 12 Jun 2024 07:50:02 +0200
Subject: [PATCH 26/34] Optimize CompositeBytesReference.toBytesRef (#109288)
No need to copy into an intermediate (and oversized) `BytesRefBuilder` here.
Since all the sizes are known up front, a single round of copying and a single
`byte[]` allocation are enough.
This method is called here and there, e.g. when indexing large documents,
so this change reduces the cost of copying to fresh on-heap bytes by at least 2x.
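As a rough, standalone sketch of the approach (illustrative only, not the
production code; `chunks` stands in for the backing references):

    // Total size is known up front, so allocate the target array once and copy
    // each backing chunk straight into it, skipping the intermediate builder.
    static byte[] concat(byte[][] chunks, int totalLength) {
        byte[] result = new byte[totalLength];
        int offset = 0;
        for (byte[] chunk : chunks) {
            System.arraycopy(chunk, 0, result, offset, chunk.length);
            offset += chunk.length;
        }
        assert offset == result.length;
        return result;
    }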
---
.../common/bytes/CompositeBytesReference.java | 26 ++++++++++++++-----
1 file changed, 20 insertions(+), 6 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
index b63d722df9b4..65a3bf95336c 100644
--- a/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
+++ b/server/src/main/java/org/elasticsearch/common/bytes/CompositeBytesReference.java
@@ -9,7 +9,6 @@
package org.elasticsearch.common.bytes;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.RamUsageEstimator;
@@ -172,18 +171,33 @@ public final class CompositeBytesReference extends AbstractBytesReference {
@Override
public BytesRef toBytesRef() {
- BytesRefBuilder builder = new BytesRefBuilder();
- builder.grow(length());
+ final byte[] result = new byte[length];
+ int offset = 0;
+ for (BytesReference reference : references) {
+ if (reference.hasArray()) {
+ int len = reference.length();
+ System.arraycopy(reference.array(), reference.arrayOffset(), result, offset, len);
+ offset += len;
+ } else {
+ offset = copyViaIterator(reference, result, offset);
+ }
+ }
+ assert offset == result.length;
+ return new BytesRef(result);
+ }
+
+ private static int copyViaIterator(BytesReference reference, byte[] result, int offset) {
BytesRef spare;
- BytesRefIterator iterator = iterator();
+ BytesRefIterator iterator = reference.iterator();
try {
while ((spare = iterator.next()) != null) {
- builder.append(spare);
+ System.arraycopy(spare.bytes, spare.offset, result, offset, spare.length);
+ offset += spare.length;
}
} catch (IOException ex) {
throw new AssertionError("won't happen", ex); // this is really an error since we don't do IO in our bytesreferences
}
- return builder.toBytesRef();
+ return offset;
}
@Override
From a7cd581201efcadf2a8006b09ee19461303b67a1 Mon Sep 17 00:00:00 2001
From: Tanguy Leroux
Date: Wed, 12 Jun 2024 08:58:37 +0200
Subject: [PATCH 27/34] Update sparse file tracker `complete` pointer on
progress (#109247)
This change updates the complete pointer when a Gap makes progress.
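Illustrative sketch of the effect, using the tracker and gap API exercised by
the new tests below (`fullListener` and `prefixListener` are placeholder
ActionListener<Void> instances, and the sizes are made up):

    var tracker = new SparseFileTracker("file", 4096L);
    var gaps = tracker.waitForRange(ByteRange.of(0L, 4096L), ByteRange.of(0L, 4096L), fullListener);
    // a second reader only needs the first 1024 bytes of the same pending gap
    tracker.waitForRange(ByteRange.of(0L, 1024L), ByteRange.of(0L, 1024L), prefixListener);
    gaps.get(0).onProgress(1024L);
    // the prefix reader was already notified on progress; what is new is that the
    // complete pointer also advances, so tracker.checkAvailable(1024L) returns true
    // before the gap is fully completed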
---
.../ProgressListenableActionFuture.java | 51 +++++++++++----
.../blobcache/common/SparseFileTracker.java | 38 ++++++++++--
.../ProgressListenableActionFutureTests.java | 42 ++++++++++++-
.../common/SparseFileTrackerTests.java | 62 +++++++++++++++++++
4 files changed, 175 insertions(+), 18 deletions(-)
diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java
index 6381f1c1e211..00cc9554a64e 100644
--- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java
+++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/ProgressListenableActionFuture.java
@@ -7,12 +7,16 @@
package org.elasticsearch.blobcache.common;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.PlainActionFuture;
+import org.elasticsearch.core.Nullable;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.LongConsumer;
import java.util.function.Supplier;
/**
@@ -24,29 +28,40 @@ import java.util.function.Supplier;
*/
class ProgressListenableActionFuture extends PlainActionFuture<Long> {
+ private static final Logger logger = LogManager.getLogger(ProgressListenableActionFuture.class);
+
private record PositionAndListener(long position, ActionListener<Long> listener) {}
- protected final long start;
- protected final long end;
+ final long start;
+ final long end;
- // modified under 'this' mutex
- private volatile List<PositionAndListener> listeners;
- protected volatile long progress;
+ /**
+ * A consumer that accepts progress made by this {@link ProgressListenableActionFuture}. The consumer is called before listeners are
+ * notified of the updated progress value in {@link #onProgress(long)} if the value is less than the actual end. The consumer can be
+ * called with out-of-order progress values.
+ */
+ @Nullable
+ private final LongConsumer progressConsumer;
+
+ private List<PositionAndListener> listeners;
+ private long progress;
private volatile boolean completed;
/**
* Creates a {@link ProgressListenableActionFuture} that accepts the progression
* to be within {@code start} (inclusive) and {@code end} (exclusive) values.
*
- * @param start the start (inclusive)
- * @param end the end (exclusive)
+ * @param start the start (inclusive)
+ * @param end the end (exclusive)
+ * @param progressConsumer a consumer that accepts the progress made by this {@link ProgressListenableActionFuture}
*/
- ProgressListenableActionFuture(long start, long end) {
+ ProgressListenableActionFuture(long start, long end, @Nullable LongConsumer progressConsumer) {
super();
this.start = start;
this.end = end;
this.progress = start;
this.completed = false;
+ this.progressConsumer = progressConsumer;
assert invariant();
}
@@ -108,6 +123,9 @@ class ProgressListenableActionFuture extends PlainActionFuture<Long> {
}
}
if (listenersToExecute != null) {
+ if (progressConsumer != null) {
+ safeAcceptProgress(progressConsumer, progressValue);
+ }
listenersToExecute.forEach(listener -> executeListener(listener, () -> progressValue));
}
assert invariant();
@@ -115,8 +133,8 @@ class ProgressListenableActionFuture extends PlainActionFuture<Long> {
@Override
public void onResponse(Long result) {
- if (result == null || result < start || end < result) {
- assert false : start + " < " + result + " < " + end;
+ if (result == null || end != result) {
+ assert false : result + " != " + end;
throw new IllegalArgumentException("Invalid completion value [start=" + start + ",end=" + end + ",response=" + result + ']');
}
ensureNotCompleted();
@@ -143,6 +161,7 @@ class ProgressListenableActionFuture extends PlainActionFuture<Long> {
synchronized (this) {
assert completed == false;
completed = true;
+ assert listeners == null || listeners.stream().allMatch(l -> progress < l.position() && l.position() <= end);
listenersToExecute = this.listeners;
listeners = null;
}
@@ -189,8 +208,18 @@ class ProgressListenableActionFuture extends PlainActionFuture<Long> {
}
}
+ private static void safeAcceptProgress(LongConsumer consumer, long progress) {
+ assert consumer != null;
+ try {
+ consumer.accept(progress);
+ } catch (Exception e) {
+ assert false : e;
+ logger.warn("Failed to consume progress value", e);
+ }
+ }
+
@Override
- public String toString() {
+ public synchronized String toString() {
return "ProgressListenableActionFuture[start="
+ start
+ ", end="
diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/SparseFileTracker.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/SparseFileTracker.java
index 6e6a11fbddc9..e3ff6a7ae4c7 100644
--- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/SparseFileTracker.java
+++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/SparseFileTracker.java
@@ -18,6 +18,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
+import java.util.function.LongConsumer;
/**
* Keeps track of the contents of a file that may not be completely present.
@@ -199,7 +200,7 @@ public class SparseFileTracker {
final Range newPendingRange = new Range(
targetRange.start,
range.end(),
- new ProgressListenableActionFuture(targetRange.start, range.end())
+ new ProgressListenableActionFuture(targetRange.start, range.end(), progressConsumer(targetRange.start))
);
ranges.add(newPendingRange);
pendingRanges.add(newPendingRange);
@@ -218,7 +219,7 @@ public class SparseFileTracker {
final Range newPendingRange = new Range(
targetRange.start,
newPendingRangeEnd,
- new ProgressListenableActionFuture(targetRange.start, newPendingRangeEnd)
+ new ProgressListenableActionFuture(targetRange.start, newPendingRangeEnd, progressConsumer(targetRange.start))
);
ranges.add(newPendingRange);
pendingRanges.add(newPendingRange);
@@ -260,6 +261,15 @@ public class SparseFileTracker {
}
}
+ private LongConsumer progressConsumer(long rangeStart) {
+ assert Thread.holdsLock(ranges);
+ if (rangeStart == complete) {
+ return this::updateCompletePointer;
+ } else {
+ return null;
+ }
+ }
+
public boolean checkAvailable(long upTo) {
assert upTo <= length : "tried to check availability up to [" + upTo + "] but length is only [" + length + "]";
return complete >= upTo;
@@ -464,11 +474,27 @@ public class SparseFileTracker {
private void maybeUpdateCompletePointer(Range gapRange) {
assert Thread.holdsLock(ranges);
if (gapRange.start == 0) {
- assert complete <= gapRange.end;
- complete = gapRange.end;
+ updateCompletePointerHoldingLock(gapRange.end);
}
}
+ private void updateCompletePointerHoldingLock(long value) {
+ assert Thread.holdsLock(ranges);
+ assert complete <= value : complete + ">" + value;
+ complete = value;
+ }
+
+ private void updateCompletePointer(long value) {
+ synchronized (ranges) {
+ updateCompletePointerHoldingLock(value);
+ }
+ }
+
+ // used in tests
+ long getComplete() {
+ return complete;
+ }
+
private boolean assertGapRangePending(Range gapRange) {
synchronized (ranges) {
assert invariant();
@@ -535,9 +561,9 @@ public class SparseFileTracker {
/**
* Range in the file corresponding to the current gap
*/
- public final Range range;
+ private final Range range;
- Gap(Range range) {
+ private Gap(Range range) {
assert range.start < range.end : range.start + "-" + range.end;
this.range = range;
}
diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/ProgressListenableActionFutureTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/ProgressListenableActionFutureTests.java
index a94a3214fdd9..4490d087cec1 100644
--- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/ProgressListenableActionFutureTests.java
+++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/ProgressListenableActionFutureTests.java
@@ -233,9 +233,49 @@ public class ProgressListenableActionFutureTests extends ESTestCase {
assertThat(future.isDone(), is(true));
}
+ public void testLongConsumerCalledOnProgressUpdate() {
+ // min length of 2 to have at least one progress update before reaching the end
+ long length = randomLongBetween(2L, ByteSizeUnit.TB.toBytes(1L));
+ long start = randomLongBetween(Long.MIN_VALUE, Long.MAX_VALUE - length);
+ long end = start + length;
+
+ var consumed = new HashSet<Long>();
+ var future = new ProgressListenableActionFuture(
+ start,
+ end,
+ p -> assertThat("LongConsumer should not consumed the same value twice", consumed.add(p), equalTo(true))
+ );
+
+ long position = start;
+ int iters = randomIntBetween(10, 25);
+ for (int i = 0; i < iters && position < end - 1L; i++) {
+ var progress = randomLongBetween(position + 1L, end - 1L);
+
+ var listener = new PlainActionFuture<Long>();
+ future.addListener(
+ ActionListener.runBefore(
+ listener,
+ () -> assertThat(
+ "LongConsumer should have been called before listener completion",
+ consumed.contains(progress),
+ equalTo(true)
+ )
+ ),
+ randomLongBetween(position + 1L, progress)
+ );
+ future.onProgress(progress);
+
+ assertThat(consumed.contains(progress), equalTo(true));
+ assertThat(listener.isDone(), equalTo(true));
+ position = progress;
+ }
+ future.onProgress(end);
+ assertThat("LongConsumer is not called when progress is updated to the end", consumed.contains(end), equalTo(false));
+ }
+
private static ProgressListenableActionFuture randomFuture() {
final long delta = randomLongBetween(1L, ByteSizeUnit.TB.toBytes(1L));
final long start = randomLongBetween(Long.MIN_VALUE, Long.MAX_VALUE - delta);
- return new ProgressListenableActionFuture(start, start + delta);
+ return new ProgressListenableActionFuture(start, start + delta, null);
}
}
diff --git a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/SparseFileTrackerTests.java b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/SparseFileTrackerTests.java
index 5973b90c814d..fda560ccb2e2 100644
--- a/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/SparseFileTrackerTests.java
+++ b/x-pack/plugin/blob-cache/src/test/java/org/elasticsearch/blobcache/common/SparseFileTrackerTests.java
@@ -517,6 +517,68 @@ public class SparseFileTrackerTests extends ESTestCase {
assertThat(completedRanges, equalTo(expectedCompletedRanges));
}
+ public void testCompletePointerUpdatesOnProgress() {
+ // min length of 2 to have at least one progress update before reaching the end
+ byte[] bytes = new byte[between(2, 1024)];
+ var tracker = new SparseFileTracker(getTestName(), bytes.length);
+
+ long position = 0L;
+ for (int i = 0; i < 25 && position < tracker.getLength() - 1L; i++) {
+ var progress = randomLongBetween(position + 1L, tracker.getLength() - 1L);
+
+ var listener = new PlainActionFuture<Void>();
+ var gaps = tracker.waitForRange(
+ ByteRange.of(position, progress),
+ ByteRange.of(position, progress),
+ ActionListener.runBefore(listener, () -> assertThat(tracker.getComplete(), equalTo(progress)))
+ );
+ assertThat(listener.isDone(), equalTo(false));
+ assertThat(gaps, hasSize(1));
+
+ gaps.forEach(gap -> {
+ long latestUpdatedCompletePointer = gap.start();
+
+ for (long j = gap.start(); j < gap.end(); j++) {
+ final PlainActionFuture<Void> awaitingListener;
+ if (randomBoolean()) {
+ awaitingListener = new PlainActionFuture<>();
+ var moreGaps = tracker.waitForRange(
+ ByteRange.of(gap.start(), j + 1L),
+ ByteRange.of(gap.start(), j + 1L),
+ awaitingListener
+ );
+ assertThat(moreGaps.isEmpty(), equalTo(true));
+ } else {
+ awaitingListener = null;
+ }
+
+ assertThat(bytes[toIntBytes(j)], equalTo(UNAVAILABLE));
+ bytes[toIntBytes(j)] = AVAILABLE;
+ gap.onProgress(j + 1L);
+
+ if (awaitingListener != null && j < gap.end() - 1L) {
+ assertThat(
+ "Complete pointer should have been updated when a listener is waiting for the gap to be completed",
+ tracker.getComplete(),
+ equalTo(j + 1L)
+ );
+ assertThat(awaitingListener.isDone(), equalTo(true));
+ latestUpdatedCompletePointer = tracker.getComplete();
+ } else {
+ assertThat(
+ "Complete pointer is not updated if no listeners are waiting for the gap to be completed",
+ tracker.getComplete(),
+ equalTo(latestUpdatedCompletePointer)
+ );
+ }
+ }
+ gap.onCompletion();
+ assertThat(tracker.getComplete(), equalTo(gap.end()));
+ });
+ position = progress;
+ }
+ }
+
private static void checkRandomAbsentRange(byte[] fileContents, SparseFileTracker sparseFileTracker, boolean expectExact) {
final long checkStart = randomLongBetween(0, fileContents.length - 1);
final long checkEnd = randomLongBetween(checkStart, fileContents.length);
From faa9648a2923d4cbfd744938951962d93b2edf6c Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Wed, 12 Jun 2024 10:53:47 +0200
Subject: [PATCH 28/34] Disable auto-expand functionality for stateless indices
(#108797)
Replicas are managed automatically in stateless, so auto-expand replicas needs to be disabled for stateless indices
to avoid conflicting with that automatic management.
As a result, stateless indices are no longer affected by the auto-expand logic in the ES codebase.
There is an edge case for indices that manually set number_of_replicas to 0 while also setting auto_expand_replicas
in their index settings: in that case we force the number of replicas to 1 rather than 0.
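A rough sketch of the resulting decision flow (illustrative only; `setNumberOfReplicas`
is a hypothetical helper, not the real API — see the diff below for the actual change):

    static void applyAutoExpand(IndexMetadata indexMetadata) {
        boolean stateless = "stateless".equals(
            indexMetadata.getSettings().get(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING.getKey())
        );
        if (stateless) {
            if (indexMetadata.getNumberOfReplicas() == 0) {
                setNumberOfReplicas(indexMetadata.getIndex().getName(), 1); // force 0 -> 1 for stateless
            }
            return; // otherwise stateless indices are left untouched by auto-expand
        }
        // ... regular auto-expand handling for stateful indices ...
    }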
---
.../cluster/metadata/AutoExpandReplicas.java | 20 ++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java
index 106f4c1e4e38..9fb44e0106df 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AutoExpandReplicas.java
@@ -100,13 +100,6 @@ public record AutoExpandReplicas(int minReplicas, int maxReplicas, boolean enabl
public int getDesiredNumberOfReplicas(IndexMetadata indexMetadata, RoutingAllocation allocation) {
assert enabled : "should only be called when enabled";
- // Make sure in stateless auto-expand indices always have 1 replica to ensure all shard roles are always present
- if (Objects.equals(
- indexMetadata.getSettings().get(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING.getKey()),
- "stateless"
- )) {
- return 1;
- }
int numMatchingDataNodes = 0;
for (DiscoveryNode discoveryNode : allocation.nodes().getDataNodes().values()) {
Decision decision = allocation.deciders().shouldAutoExpandToNode(indexMetadata, discoveryNode, allocation);
@@ -150,9 +143,22 @@ public record AutoExpandReplicas(int minReplicas, int maxReplicas, boolean enabl
for (final IndexMetadata indexMetadata : metadata) {
if (indexMetadata.getState() == IndexMetadata.State.OPEN || isIndexVerifiedBeforeClosed(indexMetadata)) {
AutoExpandReplicas autoExpandReplicas = indexMetadata.getAutoExpandReplicas();
+ // Make sure auto-expand is applied only when configured, and entirely disabled in stateless
if (autoExpandReplicas.enabled() == false) {
continue;
}
+ // Special case for stateless indices: auto-expand is disabled, unless number_of_replicas has been set
+ // manually to 0 via index settings, which needs to be converted to 1.
+ if (Objects.equals(
+ indexMetadata.getSettings().get(ExistingShardsAllocator.EXISTING_SHARDS_ALLOCATOR_SETTING.getKey()),
+ "stateless"
+ )) {
+ if (indexMetadata.getNumberOfReplicas() == 0) {
+ nrReplicasChanged.computeIfAbsent(1, ArrayList::new).add(indexMetadata.getIndex().getName());
+ } else {
+ continue;
+ }
+ }
if (allocation == null) {
allocation = allocationSupplier.get();
}
From ac6013aa4b196f1cf183bf50fe3ef60590833886 Mon Sep 17 00:00:00 2001
From: Tim Vernum
Date: Wed, 12 Jun 2024 19:07:16 +1000
Subject: [PATCH 29/34] Extend assertion expiry in SAML auth tests (#109607)
Sometimes Buildkite seems to swallow time. This commit extends some of
the authentication/session expiry times to compensate.
Relates: #109299
---
.../authc/saml/SamlAuthenticatorTests.java | 20 ++++++++++++-------
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
index 16c7b39fa695..83f09bad0d27 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
@@ -592,17 +592,23 @@ public class SamlAuthenticatorTests extends SamlResponseHandlerTests {
}
public void testExpiredAuthnStatementSessionIsRejected() throws Exception {
- Instant now = clock.instant();
- String xml = getSimpleResponseAsString(now);
+ final Instant now = clock.instant();
+ final int sessionExpirySeconds = 60;
+ final Instant subjectConfirmationValidUntil = now.plusSeconds(500);
+ final Instant sessionValidUntil = now.plusSeconds(sessionExpirySeconds);
+ final String xml = SamlUtils.getXmlContent(
+ getSimpleResponse(now, randomId(), randomId(), subjectConfirmationValidUntil, sessionValidUntil),
+ false
+ );
SamlToken token = token(signResponse(xml));
assertThat(authenticator.authenticate(token), notNullValue());
// and still valid if we advance partway through the session expiry time
- clock.fastForwardSeconds(30);
+ clock.fastForwardSeconds(sessionExpirySeconds / 2);
assertThat(authenticator.authenticate(token), notNullValue());
// and still valid if we advance past the expiry time, but allow for clock skew
- clock.fastForwardSeconds((int) (30 + maxSkew.seconds() / 2));
+ clock.fastForwardSeconds((int) (sessionExpirySeconds / 2 + maxSkew.seconds() / 2));
assertThat(authenticator.authenticate(token), notNullValue());
// but fails once we get past the clock skew allowance
@@ -1442,8 +1448,8 @@ public class SamlAuthenticatorTests extends SamlResponseHandlerTests {
}
private Response getSimpleResponse(Instant now, String nameId, String sessionindex) {
- Instant subjectConfirmationValidUntil = now.plusSeconds(120);
- Instant sessionValidUntil = now.plusSeconds(60);
+ Instant subjectConfirmationValidUntil = now.plusSeconds(500);
+ Instant sessionValidUntil = now.plusSeconds(300);
return getSimpleResponse(now, nameId, sessionindex, subjectConfirmationValidUntil, sessionValidUntil);
}
@@ -1565,7 +1571,7 @@ public class SamlAuthenticatorTests extends SamlResponseHandlerTests {
String nameId,
String sessionindex
) {
- Instant validUntil = now.plusSeconds(30);
+ Instant validUntil = now.plusSeconds(300);
String xml = "
Date: Wed, 12 Jun 2024 12:15:53 +0200
Subject: [PATCH 30/34] Speedup ContextIndexSearcher (#109601)
We can do a little better here and create less garbage and save cycles on
method calls when doing all the sorting.
---
.../search/internal/ContextIndexSearcher.java | 19 ++++++++++++-------
1 file changed, 12 insertions(+), 7 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
index 0263c6e83b17..cba2cf761e6f 100644
--- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
+++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java
@@ -49,7 +49,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.PriorityQueue;
@@ -255,13 +254,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
// Make a copy so we can sort:
List<LeafReaderContext> sortedLeaves = new ArrayList<>(leaves);
// Sort by maxDoc, descending:
- final Comparator<LeafReaderContext> leafComparator = Comparator.comparingInt(l -> l.reader().maxDoc());
- sortedLeaves.sort(leafComparator.reversed());
+ sortedLeaves.sort((c1, c2) -> Integer.compare(c2.reader().maxDoc(), c1.reader().maxDoc()));
// we add the groups on a priority queue, so we can add orphan leafs to the smallest group
- final Comparator<List<LeafReaderContext>> groupComparator = Comparator.comparingInt(
- l -> l.stream().mapToInt(lr -> lr.reader().maxDoc()).sum()
+ final PriorityQueue<List<LeafReaderContext>> queue = new PriorityQueue<>(
+ (c1, c2) -> Integer.compare(sumMaxDocValues(c1), sumMaxDocValues(c2))
);
- final PriorityQueue<List<LeafReaderContext>> queue = new PriorityQueue<>(groupComparator);
long docSum = 0;
List<LeafReaderContext> group = new ArrayList<>();
for (LeafReaderContext ctx : sortedLeaves) {
@@ -297,6 +294,14 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
return slices;
}
+ private static int sumMaxDocValues(List<LeafReaderContext> l) {
+ int sum = 0;
+ for (LeafReaderContext lr : l) {
+ sum += lr.reader().maxDoc();
+ }
+ return sum;
+ }
+
@Override
public <C extends Collector, T> T search(Query query, CollectorManager<C, T> collectorManager) throws IOException {
final C firstCollector = collectorManager.newCollector();
@@ -337,7 +342,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
throw new IllegalStateException("CollectorManager does not always produce collectors with the same score mode");
}
}
- final List<Callable<C>> listTasks = new ArrayList<>();
+ final List<Callable<C>> listTasks = new ArrayList<>(leafSlices.length);
for (int i = 0; i < leafSlices.length; ++i) {
final LeafReaderContext[] leaves = leafSlices[i].leaves;
final C collector = collectors.get(i);
From 3b28c2f6a4dbadb007080f9355db7782bb603e2a Mon Sep 17 00:00:00 2001
From: Nick Tindall
Date: Wed, 12 Jun 2024 20:47:08 +1000
Subject: [PATCH 31/34] Consider error_trace supported by all endpoints
(#109613)
Closes #109612
---
docs/changelog/109613.yaml | 6 ++++++
.../java/org/elasticsearch/common/util/set/Sets.java | 9 +++++++++
.../java/org/elasticsearch/rest/BaseRestHandler.java | 3 ++-
.../main/java/org/elasticsearch/rest/RestResponse.java | 1 +
.../org/elasticsearch/rest/BaseRestHandlerTests.java | 1 +
5 files changed, 19 insertions(+), 1 deletion(-)
create mode 100644 docs/changelog/109613.yaml
diff --git a/docs/changelog/109613.yaml b/docs/changelog/109613.yaml
new file mode 100644
index 000000000000..21d152ac1d6d
--- /dev/null
+++ b/docs/changelog/109613.yaml
@@ -0,0 +1,6 @@
+pr: 109613
+summary: Consider `error_trace` supported by all endpoints
+area: Infra/REST API
+type: bug
+issues:
+ - 109612
diff --git a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java
index 75e5717d41b9..5434dc91238c 100644
--- a/server/src/main/java/org/elasticsearch/common/util/set/Sets.java
+++ b/server/src/main/java/org/elasticsearch/common/util/set/Sets.java
@@ -135,6 +135,15 @@ public final class Sets {
return union;
}
+ @SafeVarargs
+ public static <T> Set<T> union(Set<T> first, Set<T>... others) {
+ Set<T> union = new HashSet<>(first);
+ for (Set<T> other : others) {
+ union.addAll(other);
+ }
+ return union;
+ }
+
/**
* The intersection of two sets. Namely, the resulting set contains all the elements that are in both sets.
* Neither input is mutated by this operation, an entirely new set is returned.
diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
index f91b08439422..a17bc885f6b6 100644
--- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
+++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java
@@ -83,7 +83,7 @@ public abstract class BaseRestHandler implements RestHandler {
// check if the query has any parameters that are not in the supported set (if declared)
Set<String> supported = allSupportedParameters();
if (supported != null) {
- var allSupported = Sets.union(ALWAYS_SUPPORTED, supported);
+ var allSupported = Sets.union(RestResponse.RESPONSE_PARAMS, ALWAYS_SUPPORTED, supported);
if (allSupported.containsAll(request.params().keySet()) == false) {
Set<String> unsupported = Sets.difference(request.params().keySet(), allSupported);
throw new IllegalArgumentException(unrecognized(request, unsupported, allSupported, "parameter"));
@@ -97,6 +97,7 @@ public abstract class BaseRestHandler implements RestHandler {
// use a sorted set so the unconsumed parameters appear in a reliable sorted order
final SortedSet<String> unconsumedParams = request.unconsumedParams()
.stream()
+ .filter(p -> RestResponse.RESPONSE_PARAMS.contains(p) == false)
.filter(p -> responseParams(request.getRestApiVersion()).contains(p) == false)
.collect(Collectors.toCollection(TreeSet::new));
diff --git a/server/src/main/java/org/elasticsearch/rest/RestResponse.java b/server/src/main/java/org/elasticsearch/rest/RestResponse.java
index 8cc0e35a6480..5502ab1ba094 100644
--- a/server/src/main/java/org/elasticsearch/rest/RestResponse.java
+++ b/server/src/main/java/org/elasticsearch/rest/RestResponse.java
@@ -37,6 +37,7 @@ import static org.elasticsearch.rest.RestController.ELASTIC_PRODUCT_HTTP_HEADER;
public final class RestResponse implements Releasable {
public static final String TEXT_CONTENT_TYPE = "text/plain; charset=UTF-8";
+ public static final Set<String> RESPONSE_PARAMS = Set.of("error_trace");
static final String STATUS = "status";
diff --git a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
index b5c6b28693b3..2318614c241e 100644
--- a/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/BaseRestHandlerTests.java
@@ -235,6 +235,7 @@ public class BaseRestHandlerTests extends ESTestCase {
params.put("filter_path", randomAlphaOfLength(8));
params.put("pretty", randomFrom("true", "false", "", null));
params.put("human", null);
+ params.put("error_trace", randomFrom("true", "false", null));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withParams(params).build();
RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1);
handler.handleRequest(request, channel, mockClient);
From 67f85ebea099fcc027997624c7577afef4311cd9 Mon Sep 17 00:00:00 2001
From: Tommaso Teofili
Date: Wed, 12 Jun 2024 13:38:32 +0200
Subject: [PATCH 32/34] DenseVectorMappingUpdateIT BWC fix (#109620)
---
muted-tests.yml | 2 --
.../upgrades/DenseVectorMappingUpdateIT.java | 14 +++++++++++---
2 files changed, 11 insertions(+), 5 deletions(-)
diff --git a/muted-tests.yml b/muted-tests.yml
index 03a0df5cc395..dea7741715a7 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -59,8 +59,6 @@ tests:
- class: org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAppendTests
method: testEvaluateBlockWithoutNulls {TestCase=, }
issue: https://github.com/elastic/elasticsearch/issues/109409
-- class: DenseVectorMappingUpdateIT
- issue: "https://github.com/elastic/elasticsearch/issues/109571"
# Examples:
#
diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java
index 99cbef655693..0830c1b766a2 100644
--- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java
+++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DenseVectorMappingUpdateIT.java
@@ -91,9 +91,13 @@ public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase {
.startObject("properties")
.startObject("embedding")
.field("type", "dense_vector")
+ .field("index", "true")
.field("dims", 4)
+ .field("similarity", "cosine")
.startObject("index_options")
.field("type", "hnsw")
+ .field("m", "16")
+ .field("ef_construction", "100")
.endObject()
.endObject()
.endObject()
@@ -109,7 +113,7 @@ public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase {
int expectedCount = 10;
- assertCount("test_index", expectedCount);
+ assertCount(indexName, expectedCount);
if (isUpgradedCluster() && clusterSupportsDenseVectorTypeUpdate()) {
Request updateMapping = new Request("PUT", "/" + indexName + "/_mapping");
@@ -118,9 +122,13 @@ public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase {
.startObject("properties")
.startObject("embedding")
.field("type", "dense_vector")
+ .field("index", "true")
.field("dims", 4)
+ .field("similarity", "cosine")
.startObject("index_options")
.field("type", "int8_hnsw")
+ .field("m", "16")
+ .field("ef_construction", "100")
.endObject()
.endObject()
.endObject()
@@ -132,7 +140,7 @@ public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase {
index.setJsonEntity(BULK2);
assertOK(client().performRequest(index));
expectedCount = 20;
- assertCount("test_index", expectedCount);
+ assertCount(indexName, expectedCount);
}
}
}
@@ -152,7 +160,7 @@ public class DenseVectorMappingUpdateIT extends AbstractRollingUpgradeTestCase {
Map<?, ?> response = entityAsMap(client().performRequest(new Request("GET", "_nodes")));
Map<?, ?> nodes = (Map<?, ?>) response.get("nodes");
- Predicate<Map<?, ?>> nodeSupportsBulkApi = n -> Version.fromString(n.get("version").toString()).onOrAfter(Version.V_8_14_0);
+ Predicate<Map<?, ?>> nodeSupportsBulkApi = n -> Version.fromString(n.get("version").toString()).onOrAfter(Version.V_8_15_0);
return nodes.values().stream().map(o -> (Map<?, ?>) o).allMatch(nodeSupportsBulkApi);
}
From d846223593b7986d4148a5fde9086cfba79bd232 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Wed, 12 Jun 2024 07:51:42 -0400
Subject: [PATCH 33/34] Mute all collapse tests for 8.13 (#109594)
Related to: https://github.com/elastic/elasticsearch/issues/109476
---
.../test/search.retrievers/10_standard_retriever.yml | 5 +++++
.../rest-api-spec/test/search/110_field_collapsing.yml | 6 +++++-
.../test/search/111_field_collapsing_with_max_score.yml | 5 +++++
.../test/search/112_field_collapsing_with_rescore.yml | 5 +++++
.../test/search/115_multiple_field_collapsing.yml | 6 ++++++
.../resources/rest-api-spec/test/60_collapse.yml | 6 +++++-
6 files changed, 31 insertions(+), 2 deletions(-)
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml
index fcd5b49c984c..99d723664097 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.retrievers/10_standard_retriever.yml
@@ -473,6 +473,11 @@ setup:
---
"standard retriever collapse":
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
- do:
search:
index: animals
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml
index c10d3c48259f..2eb8b729d2c6 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -1,4 +1,9 @@
setup:
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
- do:
indices.create:
index: test
@@ -85,7 +90,6 @@ setup:
---
"field collapsing and from":
-
- do:
search:
rest_total_hits_as_int: true
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/111_field_collapsing_with_max_score.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/111_field_collapsing_with_max_score.yml
index 3c0364bb7834..b05916aa96e4 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/111_field_collapsing_with_max_score.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/111_field_collapsing_with_max_score.yml
@@ -1,4 +1,9 @@
setup:
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
- requires:
cluster_features: ["gte_v8.10.0"]
reason: Collapse with max score was fixed in 8.10.0
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml
index 67819881f1b5..08a2c9c89e33 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/112_field_collapsing_with_rescore.yml
@@ -1,4 +1,9 @@
setup:
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
- requires:
cluster_features: "gte_v8.15.0"
reason: Collapse with rescore added in 8.15.0
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
index 5bc3f8cde65e..69fd8f61261c 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml
@@ -1,3 +1,9 @@
+setup:
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
---
"two levels fields collapsing":
diff --git a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/60_collapse.yml b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/60_collapse.yml
index 0c87424a88fe..84f162cdae94 100644
--- a/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/60_collapse.yml
+++ b/x-pack/plugin/mapper-unsigned-long/src/yamlRestTest/resources/rest-api-spec/test/60_collapse.yml
@@ -1,5 +1,9 @@
setup:
-
+ - skip:
+ reason: "https://github.com/elastic/elasticsearch/issues/109476"
+ known_issues:
+ - cluster_feature: "gte_v8.13.0"
+ fixed_by: "gte_v8.14.0"
- requires:
cluster_features: ["gte_v8.0.0"]
reason: "collapse on unsigned_long was added in 8.0"
From daf30f96dc700ceb944c055c9bf1b17ee54e3abf Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 12 Jun 2024 14:01:44 +0200
Subject: [PATCH 34/34] Introduce and use a few more empty response type
constants to o.e.c.lucene.Lucene (#109619)
Shortening a few more pieces of production code using constants,
potentially saving a little in code size and allocation in some cases.
---
.../elasticsearch/action/search/DfsQueryPhase.java | 3 ++-
.../action/search/SearchPhaseController.java | 6 +++---
.../elasticsearch/action/search/SearchResponse.java | 4 ++--
.../action/search/SearchResponseMerger.java | 3 ++-
.../org/elasticsearch/common/lucene/Lucene.java | 5 ++++-
.../grouping/SinglePassGroupingCollector.java | 3 ++-
.../lucene/grouping/TopFieldGroups.java | 3 ++-
.../java/org/elasticsearch/search/SearchHits.java | 2 +-
.../aggregations/metrics/TopHitsAggregator.java | 7 +------
.../org/elasticsearch/search/query/QueryPhase.java | 13 ++-----------
.../search/query/QueryPhaseCollectorManager.java | 2 +-
.../xpack/search/MutableSearchResponse.java | 4 ++--
.../xpack/security/profile/ProfileService.java | 8 ++------
13 files changed, 26 insertions(+), 37 deletions(-)
diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
index c5c35b1980a5..9ddac7f13eb5 100644
--- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
+++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java
@@ -9,6 +9,7 @@ package org.elasticsearch.action.search;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchPhaseResult;
@@ -152,7 +153,7 @@ final class DfsQueryPhase extends SearchPhase {
scoreDocs.sort(Comparator.comparingInt(scoreDoc -> scoreDoc.doc));
String nestedPath = dfsKnnResults.getNestedPath();
QueryBuilder query = new KnnScoreDocQueryBuilder(
- scoreDocs.toArray(new ScoreDoc[0]),
+ scoreDocs.toArray(Lucene.EMPTY_SCORE_DOCS),
source.knnSearch().get(i).getField(),
source.knnSearch().get(i).getQueryVector()
).boost(source.knnSearch().get(i).boost()).queryName(source.knnSearch().get(i).queryName());
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
index 1d3859b9038f..55c754545cbb 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
@@ -24,6 +24,7 @@ import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.DelayableWriteable;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.common.util.concurrent.AtomicArray;
@@ -66,7 +67,6 @@ import java.util.function.Supplier;
import static org.elasticsearch.search.SearchService.DEFAULT_SIZE;
public final class SearchPhaseController {
- private static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0];
private final BiFunction<
Supplier<Boolean>,
@@ -195,7 +195,7 @@ public final class SearchPhaseController {
return SortedTopDocs.EMPTY;
}
final TopDocs mergedTopDocs = mergeTopDocs(topDocs, size, ignoreFrom ? 0 : from);
- final ScoreDoc[] mergedScoreDocs = mergedTopDocs == null ? EMPTY_DOCS : mergedTopDocs.scoreDocs;
+ final ScoreDoc[] mergedScoreDocs = mergedTopDocs == null ? Lucene.EMPTY_SCORE_DOCS : mergedTopDocs.scoreDocs;
ScoreDoc[] scoreDocs = mergedScoreDocs;
int numSuggestDocs = 0;
if (reducedCompletionSuggestions.isEmpty() == false) {
@@ -907,6 +907,6 @@ public final class SearchPhaseController {
Object[] collapseValues,
int numberOfCompletionsSuggestions
) {
- public static final SortedTopDocs EMPTY = new SortedTopDocs(EMPTY_DOCS, false, null, null, null, 0);
+ public static final SortedTopDocs EMPTY = new SortedTopDocs(Lucene.EMPTY_SCORE_DOCS, false, null, null, null, 0);
}
}
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java
index e2443566786a..45cb11869108 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java
@@ -8,7 +8,6 @@
package org.elasticsearch.action.search;
-import org.apache.lucene.search.TotalHits;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.OriginalIndices;
@@ -18,6 +17,7 @@ import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
import org.elasticsearch.common.xcontent.ChunkedToXContentObject;
@@ -1154,7 +1154,7 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO
// public for tests
public static SearchResponse empty(Supplier<Long> tookInMillisSupplier, Clusters clusters) {
return new SearchResponse(
- SearchHits.empty(new TotalHits(0L, TotalHits.Relation.EQUAL_TO), Float.NaN),
+ SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, Float.NaN),
InternalAggregations.EMPTY,
null,
false,
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java
index ae8c749475c5..d393adc4e26d 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java
@@ -18,6 +18,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchPhaseController.TopDocsStats;
import org.elasticsearch.action.search.SearchResponse.Clusters;
import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.core.Releasable;
import org.elasticsearch.index.shard.ShardId;
@@ -177,7 +178,7 @@ public final class SearchResponseMerger implements Releasable {
final TotalHits totalHits;
if (searchHits.getTotalHits() == null) {
// in case we didn't track total hits, we get null from each cluster, but we need to set 0 eq to the TopDocs
- totalHits = new TotalHits(0, TotalHits.Relation.EQUAL_TO);
+ totalHits = Lucene.TOTAL_HITS_EQUAL_TO_ZERO;
assert trackTotalHits == null || trackTotalHits == false;
trackTotalHits = false;
} else {
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index 31a4ca97aad6..36b3076c29a3 100644
--- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -101,7 +101,10 @@ public class Lucene {
public static final ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];
- public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), EMPTY_SCORE_DOCS);
+ public static final TotalHits TOTAL_HITS_EQUAL_TO_ZERO = new TotalHits(0, TotalHits.Relation.EQUAL_TO);
+ public static final TotalHits TOTAL_HITS_GREATER_OR_EQUAL_TO_ZERO = new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
+
+ public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(TOTAL_HITS_EQUAL_TO_ZERO, EMPTY_SCORE_DOCS);
private Lucene() {}
diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java
index b11a034ce4e4..85682b9e4d50 100644
--- a/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java
+++ b/server/src/main/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollector.java
@@ -36,6 +36,7 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.grouping.GroupSelector;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.Maps;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.mapper.MappedFieldType;
public class SinglePassGroupingCollector<T> extends SimpleCollector {
if (groupMap.size() <= groupOffset) {
TotalHits totalHits = new TotalHits(totalHitCount, TotalHits.Relation.EQUAL_TO);
- return new TopFieldGroups(groupField, totalHits, new ScoreDoc[0], groupSort.getSort(), new Object[0]);
+ return new TopFieldGroups(groupField, totalHits, Lucene.EMPTY_SCORE_DOCS, groupSort.getSort(), new Object[0]);
}
if (orderedGroups == null) {
diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java
index 8e5efa8a880b..350c7d91e2e4 100644
--- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java
+++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java
@@ -16,6 +16,7 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.PriorityQueue;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.CollectionUtils;
import java.util.ArrayList;
@@ -225,7 +226,7 @@ public final class TopFieldGroups extends TopFieldDocs {
queue.pop();
}
}
- hits = hitList.toArray(new ScoreDoc[0]);
+ hits = hitList.toArray(Lucene.EMPTY_SCORE_DOCS);
values = groupList.toArray(new Object[0]);
}
TotalHits totalHits = new TotalHits(totalHitCount, totalHitsRelation);
diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java
index 15b83b202fd9..b2bc3097af18 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchHits.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java
@@ -34,7 +34,7 @@ import java.util.Objects;
public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable<SearchHit> {
public static final SearchHit[] EMPTY = new SearchHit[0];
- public static final SearchHits EMPTY_WITH_TOTAL_HITS = SearchHits.empty(new TotalHits(0, Relation.EQUAL_TO), 0);
+ public static final SearchHits EMPTY_WITH_TOTAL_HITS = SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, 0);
public static final SearchHits EMPTY_WITHOUT_TOTAL_HITS = SearchHits.empty(null, 0);
private final SearchHit[] hits;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java
index a3cf20d0b9b7..e61465fbc5e3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java
@@ -20,7 +20,6 @@ import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.TotalHits;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.MaxScoreCollector;
import org.elasticsearch.common.lucene.Lucene;
@@ -233,11 +232,7 @@ class TopHitsAggregator extends MetricsAggregator {
public InternalTopHits buildEmptyAggregation() {
TopDocs topDocs;
if (subSearchContext.sort() != null) {
- topDocs = new TopFieldDocs(
- new TotalHits(0, TotalHits.Relation.EQUAL_TO),
- new FieldDoc[0],
- subSearchContext.sort().sort.getSort()
- );
+ topDocs = new TopFieldDocs(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, new FieldDoc[0], subSearchContext.sort().sort.getSort());
} else {
topDocs = Lucene.EMPTY_TOP_DOCS;
}
diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
index 0d2610aa3428..af0240e9497f 100644
--- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java
@@ -22,7 +22,6 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.Weight;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
@@ -76,11 +75,7 @@ public class QueryPhase {
searchContext.size(0);
QueryPhase.executeQuery(searchContext);
} else {
- searchContext.queryResult()
- .topDocs(
- new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN),
- new DocValueFormat[0]
- );
+ searchContext.queryResult().topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]);
}
List<TopDocs> rrfRankResults = new ArrayList<>();
@@ -124,11 +119,7 @@ public class QueryPhase {
static void executeQuery(SearchContext searchContext) throws QueryPhaseExecutionException {
if (searchContext.hasOnlySuggest()) {
SuggestPhase.execute(searchContext);
- searchContext.queryResult()
- .topDocs(
- new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN),
- new DocValueFormat[0]
- );
+ searchContext.queryResult().topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]);
return;
}
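
(Illustrative note, not part of the patch: both call sites above now produce the same empty result shape. A minimal Java sketch, assuming the Lucene.EMPTY_TOP_DOCS constant introduced earlier in this patch; the class name EmptyQueryResultExample is hypothetical.)

    import org.elasticsearch.common.lucene.Lucene;
    import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;

    // Hypothetical example class, not part of the patch.
    final class EmptyQueryResultExample {
        // An empty query result: zero hits with an exact total, and Float.NaN
        // signalling that no max score was computed.
        static TopDocsAndMaxScore empty() {
            return new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
        }
    }
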
diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java
index 2286eb2e69f8..22b5f3d8dcaf 100644
--- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java
+++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollectorManager.java
@@ -400,7 +400,7 @@ abstract class QueryPhaseCollectorManager implements CollectorManager(totalShards - skippedShards);
this.isPartial = true;
this.threadContext = threadContext;
- this.totalHits = EMPTY_TOTAL_HITS;
+ this.totalHits = Lucene.TOTAL_HITS_GREATER_OR_EQUAL_TO_ZERO;
this.localClusterComplete = false;
}
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
index 55be659512c5..dd2377ec773c 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java
@@ -9,7 +9,6 @@ package org.elasticsearch.xpack.security.profile;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.apache.lucene.search.TotalHits;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ExceptionsHelper;
@@ -40,6 +39,7 @@ import org.elasticsearch.client.internal.OriginSettingClient;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -265,11 +265,7 @@ public class ProfileService {
public void suggestProfile(SuggestProfilesRequest request, TaskId parentTaskId, ActionListener<SuggestProfilesResponse> listener) {
tryFreezeAndCheckIndex(listener.map(response -> {
assert response == null : "only null response can reach here";
- return new SuggestProfilesResponse(
- new SuggestProfilesResponse.ProfileHit[] {},
- 0,
- new TotalHits(0, TotalHits.Relation.EQUAL_TO)
- );
+ return new SuggestProfilesResponse(new SuggestProfilesResponse.ProfileHit[] {}, 0, Lucene.TOTAL_HITS_EQUAL_TO_ZERO);
}), SEARCH_SHARDS).ifPresent(frozenProfileIndex -> {
final SearchRequest searchRequest = buildSearchRequestForSuggest(request, parentTaskId);