Merge main into multi-project

Niels Bauman 2024-12-24 18:26:13 +01:00
commit 3738202979
113 changed files with 2251 additions and 848 deletions

View file

@ -83,6 +83,8 @@ public class VectorScorerBenchmark {
RandomVectorScorer luceneDotScorerQuery;
RandomVectorScorer nativeDotScorerQuery;
RandomVectorScorer luceneSqrScorerQuery;
RandomVectorScorer nativeSqrScorerQuery;
@Setup
public void setup() throws IOException {
@ -130,6 +132,8 @@ public class VectorScorerBenchmark {
}
luceneDotScorerQuery = luceneScorer(values, VectorSimilarityFunction.DOT_PRODUCT, queryVec);
nativeDotScorerQuery = factory.getInt7SQVectorScorer(VectorSimilarityFunction.DOT_PRODUCT, values, queryVec).get();
luceneSqrScorerQuery = luceneScorer(values, VectorSimilarityFunction.EUCLIDEAN, queryVec);
nativeSqrScorerQuery = factory.getInt7SQVectorScorer(VectorSimilarityFunction.EUCLIDEAN, values, queryVec).get();
// sanity
var f1 = dotProductLucene();
@ -157,6 +161,12 @@ public class VectorScorerBenchmark {
if (q1 != q2) {
throw new AssertionError("query: lucene[" + q1 + "] != " + "native[" + q2 + "]");
}
var sqr1 = squareDistanceLuceneQuery();
var sqr2 = squareDistanceNativeQuery();
if (sqr1 != sqr2) {
throw new AssertionError("query: lucene[" + q1 + "] != " + "native[" + q2 + "]");
}
}
@TearDown
@ -217,6 +227,16 @@ public class VectorScorerBenchmark {
return 1 / (1f + adjustedDistance);
}
@Benchmark
public float squareDistanceLuceneQuery() throws IOException {
return luceneSqrScorerQuery.score(1);
}
@Benchmark
public float squareDistanceNativeQuery() throws IOException {
return nativeSqrScorerQuery.score(1);
}
QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException {
var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7);
var slice = in.slice("values", 0, in.length());
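A note on the new square-distance pair: Lucene's EUCLIDEAN similarity maps a raw squared L2 distance into a (0, 1] score, the same shape as the helper above that returns 1 / (1f + adjustedDistance). A hedged sketch with illustrative values only:

// Hedged sketch (values are illustrative, not from the benchmark):
// EUCLIDEAN-style scoring turns a squared distance into a score in (0, 1].
float squareDistance = 42.0f;
float score = 1f / (1f + squareDistance);

Identical inputs must therefore produce bit-identical floats, which is why the sanity check in setup() compares sqr1 and sqr2 with != rather than a tolerance.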

View file

@ -10,6 +10,7 @@
package org.elasticsearch.gradle.internal.conventions;
import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
import org.elasticsearch.gradle.internal.conventions.info.GitInfoValueSource;
import org.elasticsearch.gradle.internal.conventions.util.Util;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
@ -18,38 +19,28 @@ import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
import javax.inject.Inject;
import java.io.File;
class GitInfoPlugin implements Plugin<Project> {
import javax.inject.Inject;
public abstract class GitInfoPlugin implements Plugin<Project> {
private ProviderFactory factory;
private ObjectFactory objectFactory;
private Provider<String> revision;
private Property<GitInfo> gitInfo;
@Inject
GitInfoPlugin(ProviderFactory factory, ObjectFactory objectFactory) {
public GitInfoPlugin(ProviderFactory factory) {
this.factory = factory;
this.objectFactory = objectFactory;
}
@Override
public void apply(Project project) {
File rootDir = Util.locateElasticsearchWorkspace(project.getGradle());
gitInfo = objectFactory.property(GitInfo.class).value(factory.provider(() ->
GitInfo.gitInfo(rootDir)
));
gitInfo.disallowChanges();
gitInfo.finalizeValueOnRead();
revision = gitInfo.map(info -> info.getRevision() == null ? info.getRevision() : "main");
getGitInfo().convention(factory.of(GitInfoValueSource.class, spec -> { spec.getParameters().getPath().set(rootDir); }));
revision = getGitInfo().map(info -> info.getRevision() == null ? info.getRevision() : "main");
}
public Property<GitInfo> getGitInfo() {
return gitInfo;
}
public abstract Property<GitInfo> getGitInfo();
public Provider<String> getRevision() {
return revision;

View file

@ -15,9 +15,10 @@ import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
import javax.inject.Inject;
import java.util.Map;
import javax.inject.Inject;
public class LicensingPlugin implements Plugin<Project> {
static final String ELASTIC_LICENSE_URL_PREFIX = "https://raw.githubusercontent.com/elastic/elasticsearch/";
static final String ELASTIC_LICENSE_URL_POSTFIX = "/licenses/ELASTIC-LICENSE-2.0.txt";
@ -33,22 +34,31 @@ public class LicensingPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
Provider<String> revision = project.getRootProject().getPlugins().apply(GitInfoPlugin.class).getRevision();
Provider<String> licenseCommitProvider = providerFactory.provider(() ->
isSnapshotVersion(project) ? revision.get() : "v" + project.getVersion()
Provider<String> licenseCommitProvider = providerFactory.provider(
() -> isSnapshotVersion(project) ? revision.get() : "v" + project.getVersion()
);
Provider<String> elasticLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX +
licenseCommit + ELASTIC_LICENSE_URL_POSTFIX);
Provider<String> agplLicenseURL = licenseCommitProvider.map(licenseCommit -> ELASTIC_LICENSE_URL_PREFIX +
licenseCommit + AGPL_ELASTIC_LICENSE_URL_POSTFIX);
Provider<String> elasticLicenseURL = licenseCommitProvider.map(
licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + licenseCommit + ELASTIC_LICENSE_URL_POSTFIX
);
Provider<String> agplLicenseURL = licenseCommitProvider.map(
licenseCommit -> ELASTIC_LICENSE_URL_PREFIX + licenseCommit + AGPL_ELASTIC_LICENSE_URL_POSTFIX
);
// But stick the Elastic license url in project.ext so we can get it if we need to switch to it
project.getExtensions().getExtraProperties().set("elasticLicenseUrl", elasticLicenseURL);
MapProperty<String, String> licensesProperty = project.getObjects().mapProperty(String.class, String.class).convention(
providerFactory.provider(() -> Map.of(
"Server Side Public License, v 1", "https://www.mongodb.com/licensing/server-side-public-license",
"Elastic License 2.0", elasticLicenseURL.get(),
"GNU Affero General Public License Version 3", agplLicenseURL.get())
MapProperty<String, Provider<String>> licensesProperty = project.getObjects()
.mapProperty(String.class, (Class<Provider<String>>) (Class<?>) Provider.class)
.convention(
providerFactory.provider(
() -> Map.of(
"Server Side Public License, v 1",
providerFactory.provider(() -> "https://www.mongodb.com/licensing/server-side-public-license"),
"Elastic License 2.0",
elasticLicenseURL,
"GNU Affero General Public License Version 3",
agplLicenseURL
)
)
);
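The double cast in the new MapProperty declaration, (Class<Provider<String>>) (Class<?>) Provider.class, is the usual workaround for the fact that class literals are always raw. A minimal sketch of the pattern (class and method names are illustrative, not from the plugin):

import org.gradle.api.provider.Provider;

class ClassTokenSketch {
    // Provider.class has type Class<Provider>; widening to Class<?> first and
    // then narrowing yields the parameterized token mapProperty(...) needs.
    // The unchecked narrowing is deliberate and harmless at runtime.
    @SuppressWarnings("unchecked")
    static Class<Provider<String>> providerToken() {
        return (Class<Provider<String>>) (Class<?>) Provider.class;
    }
}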

View file

@ -28,6 +28,7 @@ import org.gradle.api.plugins.ExtensionContainer;
import org.gradle.api.plugins.JavaLibraryPlugin;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.provider.MapProperty;
import org.gradle.api.provider.Provider;
import org.gradle.api.provider.ProviderFactory;
import org.gradle.api.publish.PublishingExtension;
import org.gradle.api.publish.maven.MavenPublication;
@ -42,6 +43,7 @@ import org.w3c.dom.Element;
import java.io.File;
import java.util.Map;
import java.util.concurrent.Callable;
import javax.inject.Inject;
public class PublishPlugin implements Plugin<Project> {
@ -81,7 +83,7 @@ public class PublishPlugin implements Plugin<Project> {
}
});
@SuppressWarnings("unchecked")
var projectLicenses = (MapProperty<String, String>) project.getExtensions().getExtraProperties().get("projectLicenses");
var projectLicenses = (MapProperty<String, Provider<String>>) project.getExtensions().getExtraProperties().get("projectLicenses");
publication.getPom().withXml(xml -> {
var node = xml.asNode();
node.appendNode("inceptionYear", "2009");
@ -89,7 +91,7 @@ public class PublishPlugin implements Plugin<Project> {
projectLicenses.get().entrySet().stream().sorted(Map.Entry.comparingByKey()).forEach(entry -> {
Node license = licensesNode.appendNode("license");
license.appendNode("name", entry.getKey());
license.appendNode("url", entry.getValue());
license.appendNode("url", entry.getValue().get());
license.appendNode("distribution", "repo");
});
var developer = node.appendNode("developers").appendNode("developer");
@ -194,7 +196,6 @@ public class PublishPlugin implements Plugin<Project> {
});
}
/**
* Format the generated pom files to be in a sort of reproducible order.
*/

View file

@ -22,6 +22,7 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@ -190,4 +191,15 @@ public class GitInfo {
}
}
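// Value-based equality below lets Gradle's configuration cache decide whether
// the result of GitInfoValueSource has changed between builds.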
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
GitInfo gitInfo = (GitInfo) o;
return Objects.equals(revision, gitInfo.revision) && Objects.equals(origin, gitInfo.origin);
}
@Override
public int hashCode() {
return Objects.hash(revision, origin);
}
}

View file

@ -0,0 +1,22 @@
package org.elasticsearch.gradle.internal.conventions.info;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;
import org.jetbrains.annotations.Nullable;
import java.io.File;
public abstract class GitInfoValueSource implements ValueSource<GitInfo, GitInfoValueSource.Parameters> {
@Nullable
@Override
public GitInfo obtain() {
File path = getParameters().getPath().get();
return GitInfo.gitInfo(path);
}
public interface Parameters extends ValueSourceParameters {
Property<File> getPath();
}
}
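GitInfoValueSource follows Gradle's ValueSource contract: the value returned by obtain() is tracked as a build input and re-obtained when Gradle checks whether a cached configuration is still valid, which is what makes the external Git read configuration-cache safe. A minimal sketch of the same pattern against an environment variable (hypothetical class, for illustration only):

import org.gradle.api.provider.Property;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;

// Hypothetical ValueSource mirroring the shape of GitInfoValueSource above.
public abstract class EnvVarValueSource implements ValueSource<String, EnvVarValueSource.Parameters> {

    public interface Parameters extends ValueSourceParameters {
        Property<String> getVariableName();
    }

    @Override
    public String obtain() {
        // re-evaluated when Gradle validates a configuration cache entry
        return System.getenv(getParameters().getVariableName().get());
    }
}

It would be wired up the same way as in apply(...) above, e.g. providers.of(EnvVarValueSource.class, spec -> spec.getParameters().getVariableName().set("HOME")).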

View file

@ -45,7 +45,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -130,7 +131,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme</groupId>
@ -219,7 +221,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
file("build/distributions/hello-world-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-1.0-sources.jar").exists()
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<modelVersion>4.0.0</modelVersion>
<groupId>org.acme</groupId>
@ -312,7 +315,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
file("build/distributions/hello-world-plugin-1.0-javadoc.jar").exists()
file("build/distributions/hello-world-plugin-1.0-sources.jar").exists()
file("build/distributions/hello-world-plugin-1.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-plugin-1.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-plugin-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -389,7 +393,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-plugin-2.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-plugin-2.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-plugin-2.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->
@ -439,7 +444,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
// scm info only added for internal builds
internalBuild()
buildFile << """
buildParams.setGitOrigin("https://some-repo.com/repo.git")
buildParams.setGitOrigin(project.providers.provider(() -> "https://some-repo.com/repo.git"))
apply plugin:'elasticsearch.java'
apply plugin:'elasticsearch.publish'
@ -447,7 +452,7 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
group = 'org.acme'
description = "just a test project"
ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
ext.projectLicenses.set(['The Apache Software License, Version 2.0': project.providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
"""
when:
@ -456,7 +461,8 @@ class PublishPluginFuncTest extends AbstractGradleFuncTest {
then:
result.task(":generatePom").outcome == TaskOutcome.SUCCESS
file("build/distributions/hello-world-1.0.pom").exists()
assertXmlEquals(file("build/distributions/hello-world-1.0.pom").text, """
assertXmlEquals(
file("build/distributions/hello-world-1.0.pom").text, """
<project xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<!-- This module was also published with a richer model, Gradle metadata, -->
<!-- which should be used instead. Do not delete the following line which -->

View file

@ -166,7 +166,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') {
tasks.register('buildDependencyArtifacts') {
group = 'ide'
description = 'Builds artifacts needed as dependency for IDE modules'
dependsOn([':plugins:repository-hdfs:hadoop-client-api:shadowJar',
dependsOn([':plugins:repository-hdfs:hadoop-client-api:jar',
':x-pack:plugin:esql:compute:ann:jar',
':x-pack:plugin:esql:compute:gen:jar',
':server:generateModulesList',

View file

@ -27,6 +27,7 @@ import org.gradle.api.plugins.BasePlugin;
import org.gradle.api.plugins.JavaLibraryPlugin;
import org.gradle.api.plugins.JavaPlugin;
import org.gradle.api.provider.Property;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.api.tasks.bundling.Jar;
import org.gradle.api.tasks.javadoc.Javadoc;
@ -120,12 +121,12 @@ public class ElasticsearchJavaPlugin implements Plugin<Project> {
}
private static void configureJarManifest(Project project, BuildParameterExtension buildParams) {
String gitOrigin = buildParams.getGitOrigin();
String gitRevision = buildParams.getGitRevision();
Provider<String> gitOrigin = buildParams.getGitOrigin();
Provider<String> gitRevision = buildParams.getGitRevision();
project.getPlugins().withType(InfoBrokerPlugin.class).whenPluginAdded(manifestPlugin -> {
manifestPlugin.add("Module-Origin", toStringable(() -> gitOrigin));
manifestPlugin.add("Change", toStringable(() -> gitRevision));
manifestPlugin.add("Module-Origin", toStringable(() -> gitOrigin.get()));
manifestPlugin.add("Change", toStringable(() -> gitRevision.get()));
manifestPlugin.add("X-Compile-Elasticsearch-Version", toStringable(VersionProperties::getElasticsearch));
manifestPlugin.add("X-Compile-Lucene-Version", toStringable(VersionProperties::getLucene));
manifestPlugin.add(

View file

@ -46,9 +46,9 @@ public interface BuildParameterExtension {
Provider<String> getRuntimeJavaDetails();
String getGitRevision();
Provider<String> getGitRevision();
String getGitOrigin();
Provider<String> getGitOrigin();
ZonedDateTime getBuildDate();

View file

@ -36,7 +36,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx
private final Provider<JavaVersion> runtimeJavaVersion;
private final Provider<? extends Action<JavaToolchainSpec>> javaToolChainSpec;
private final Provider<String> runtimeJavaDetails;
private final String gitRevision;
private final Provider<String> gitRevision;
private transient AtomicReference<ZonedDateTime> buildDate = new AtomicReference<>();
private final String testSeed;
@ -46,7 +46,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx
// not final for testing
private Provider<BwcVersions> bwcVersions;
private String gitOrigin;
private Provider<String> gitOrigin;
public DefaultBuildParameterExtension(
ProviderFactory providers,
@ -59,8 +59,8 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx
JavaVersion minimumCompilerVersion,
JavaVersion minimumRuntimeVersion,
JavaVersion gradleJavaVersion,
String gitRevision,
String gitOrigin,
Provider<String> gitRevision,
Provider<String> gitOrigin,
String testSeed,
boolean isCi,
int defaultParallel,
@ -155,12 +155,12 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx
}
@Override
public String getGitRevision() {
public Provider<String> getGitRevision() {
return gitRevision;
}
@Override
public String getGitOrigin() {
public Provider<String> getGitOrigin() {
return gitOrigin;
}
@ -239,7 +239,7 @@ public abstract class DefaultBuildParameterExtension implements BuildParameterEx
}
// for testing; not part of public api
public void setGitOrigin(String gitOrigin) {
public void setGitOrigin(Provider<String> gitOrigin) {
this.gitOrigin = gitOrigin;
}
}

View file

@ -14,6 +14,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.elasticsearch.gradle.VersionProperties;
import org.elasticsearch.gradle.internal.BwcVersions;
import org.elasticsearch.gradle.internal.conventions.GitInfoPlugin;
import org.elasticsearch.gradle.internal.conventions.info.GitInfo;
import org.elasticsearch.gradle.internal.conventions.info.ParallelDetector;
import org.elasticsearch.gradle.internal.conventions.util.Util;
@ -96,6 +97,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
}
this.project = project;
project.getPlugins().apply(JvmToolchainsPlugin.class);
Provider<GitInfo> gitInfo = project.getPlugins().apply(GitInfoPlugin.class).getGitInfo();
toolChainService = project.getExtensions().getByType(JavaToolchainService.class);
GradleVersion minimumGradleVersion = GradleVersion.version(getResourceContents("/minimumGradleVersion"));
if (GradleVersion.current().compareTo(minimumGradleVersion) < 0) {
@ -111,8 +114,6 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
? explicitRuntimeJavaHome
: resolveJavaHomeFromToolChainService(VersionProperties.getBundledJdkMajorVersion());
GitInfo gitInfo = GitInfo.gitInfo(project.getRootDir());
Provider<JvmInstallationMetadata> runtimeJdkMetaData = actualRuntimeJavaHome.map(
runtimeJavaHome -> metadataDetector.getMetadata(getJavaInstallation(runtimeJavaHome))
);
@ -143,8 +144,8 @@ public class GlobalBuildInfoPlugin implements Plugin<Project> {
minimumCompilerVersion,
minimumRuntimeVersion,
Jvm.current().getJavaVersion(),
gitInfo.getRevision(),
gitInfo.getOrigin(),
gitInfo.map(g -> g.getRevision()),
gitInfo.map(g -> g.getOrigin()),
getTestSeed(),
System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null,
ParallelDetector.findDefaultParallel(project),

View file

@ -9,9 +9,9 @@
package org.elasticsearch.gradle.internal.info
import spock.lang.Ignore
import spock.lang.Specification
import org.elasticsearch.gradle.internal.BwcVersions
import org.gradle.api.JavaVersion
import org.gradle.api.Project
import org.gradle.api.provider.Provider
@ -31,6 +31,7 @@ class BuildParameterExtensionSpec extends Specification {
ProjectBuilder projectBuilder = new ProjectBuilder()
@Ignore
def "#getterName is cached anc concurrently accessible"() {
given:
def project = projectBuilder.build()
@ -85,8 +86,8 @@ class BuildParameterExtensionSpec extends Specification {
JavaVersion.VERSION_11,
JavaVersion.VERSION_11,
JavaVersion.VERSION_11,
"gitRevision",
"gitOrigin",
providerMock(),
providerMock(),
"testSeed",
false,
5,

View file

@ -36,7 +36,7 @@ base {
}
// LLRC is licensed under Apache 2.0
projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
dependencies {

View file

@ -32,7 +32,7 @@ base {
}
// rest client sniffer is licensed under Apache 2.0
projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
dependencies {

View file

@ -18,7 +18,7 @@ java {
group = "${group}.client.test"
// rest client sniffer is licensed under Apache 2.0
projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
dependencies {

View file

@ -22,7 +22,7 @@
<% if (docker_base == 'iron_bank') { %>
ARG BASE_REGISTRY=registry1.dso.mil
ARG BASE_IMAGE=ironbank/redhat/ubi/ubi9
ARG BASE_TAG=9.4
ARG BASE_TAG=9.5
<% } %>
################################################################################

View file

@ -0,0 +1,5 @@
pr: 116868
summary: Run `TransportGetComponentTemplateAction` on local node
area: Indices APIs
type: enhancement
issues: []

View file

@ -0,0 +1,5 @@
pr: 117214
summary: Returning ignored fields in the simulate ingest API
area: Ingest Node
type: enhancement
issues: []

View file

@ -0,0 +1,6 @@
pr: 118016
summary: Propagate status codes from shard failures appropriately
area: Search
type: enhancement
issues:
- 118482

View file

@ -0,0 +1,6 @@
pr: 118870
summary: Rewrite TO_UPPER/TO_LOWER comparisons
area: ES|QL
type: enhancement
issues:
- 118304

View file

@ -0,0 +1,6 @@
pr: 118999
summary: Fix loss of context in the inference API for streaming APIs
area: Machine Learning
type: bug
issues:
- 119000

View file

@ -0,0 +1,6 @@
pr: 119134
summary: Handle `index.mapping.ignore_malformed` in downsampling
area: Downsampling
type: bug
issues:
- 119075

View file

@ -67,7 +67,7 @@ Wildcard (`*`) expressions are supported.
include::{docdir}/rest-api/common-parms.asciidoc[tag=flat-settings]
include::{docdir}/rest-api/common-parms.asciidoc[tag=local]
include::{docdir}/rest-api/common-parms.asciidoc[tag=local-deprecated-9.0.0]
include::{docdir}/rest-api/common-parms.asciidoc[tag=master-timeout]

View file

@ -668,6 +668,16 @@ node only. Defaults to `false`, which means information is retrieved from
the master node.
end::local[]
tag::local-deprecated-9.0.0[]
`local`::
(Optional, Boolean) If `true`, the request retrieves information from the local
node only. Defaults to `false`, which means information is retrieved from
the master node.
+
deprecated::[9.0.0, "The `?local` query parameter to this API has no effect, is now deprecated, and will be removed in a future version."]
end::local-deprecated-9.0.0[]
tag::mappings[]
`mappings`::
+

View file

@ -207,16 +207,8 @@ GET /_xpack/usage
"inference": {
"available" : true,
"enabled" : true,
"models" : [{
"service": "elasticsearch",
"task_type": "SPARSE_EMBEDDING",
"count": 1
},
{
"service": "elasticsearch",
"task_type": "TEXT_EMBEDDING",
"count": 1
},
"models" : [
...
]
},
"logstash" : {
@ -523,7 +515,10 @@ GET /_xpack/usage
"available": true,
"enabled": false,
"indices_count": 0,
"indices_with_synthetic_source": 0
"indices_with_synthetic_source": 0,
"num_docs": 0,
"size_in_bytes": 0,
"has_custom_cutoff_date": false
}
}
------------------------------------------------------------
@ -535,6 +530,7 @@ GET /_xpack/usage
// TESTRESPONSE[s/"policy_stats" : \[[^\]]*\]/"policy_stats" : $body.$_path/]
// TESTRESPONSE[s/"slm" : \{[^\}]*\},/"slm" : $body.$_path,/]
// TESTRESPONSE[s/"health_api" : \{[^\}]*\}\s*\}/"health_api" : $body.$_path/]
// TESTRESPONSE[s/"models" : \[[^\]]*\]/"models" : $body.$_path/]
// TESTRESPONSE[s/"data_streams" : \{[^\}]*\},/"data_streams" : $body.$_path,/]
// TESTRESPONSE[s/ : true/ : $body.$_path/]
// TESTRESPONSE[s/ : false/ : $body.$_path/]
@ -551,4 +547,5 @@ GET /_xpack/usage
// 5. All of the numbers and strings on the right hand side of *every* field in
// the response are ignored. So we're really only asserting things about the
// the shape of this response, not the values in it.
// 6. Ignore the contents of data streams until the failure store is tech preview.
// 6. Ignore the contents of the `inference.models` array because the models might not yet have been initialized
// 7. Ignore the contents of data streams until the failure store is tech preview.

View file

@ -35,7 +35,7 @@ tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures'
}
ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
tasks.withType(LicenseHeadersTask.class).configureEach {

View file

@ -36,7 +36,7 @@ tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'jdk-signatures'
}
ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
tasks.withType(LicenseHeadersTask.class).configureEach {

View file

@ -64,9 +64,6 @@ tests:
- class: org.elasticsearch.xpack.inference.TextEmbeddingCrudIT
method: testPutE5Small_withPlatformSpecificVariant
issue: https://github.com/elastic/elasticsearch/issues/113950
- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
method: test {yaml=reference/rest-api/usage/line_38}
issue: https://github.com/elastic/elasticsearch/issues/113694
- class: org.elasticsearch.xpack.remotecluster.RemoteClusterSecurityWithApmTracingRestIT
method: testTracingCrossCluster
issue: https://github.com/elastic/elasticsearch/issues/112731
@ -233,8 +230,6 @@ tests:
- class: org.elasticsearch.packaging.test.ArchiveTests
method: test51AutoConfigurationWithPasswordProtectedKeystore
issue: https://github.com/elastic/elasticsearch/issues/118212
- class: org.elasticsearch.ingest.common.IngestCommonClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/118215
- class: org.elasticsearch.datastreams.DataStreamsClientYamlTestSuiteIT
method: test {p0=data_stream/120_data_streams_stats/Multiple data stream}
issue: https://github.com/elastic/elasticsearch/issues/118217
@ -305,6 +300,9 @@ tests:
- class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT
method: testFailureLoadingFields
issue: https://github.com/elastic/elasticsearch/issues/118000
- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT
method: test {yaml=indices.create/20_synthetic_source/create index with use_synthetic_source}
issue: https://github.com/elastic/elasticsearch/issues/119191
# Examples:
#

View file

@ -28,7 +28,7 @@ configurations {
}
dependencies {
api project(path: 'hadoop-client-api', configuration: 'shadow')
api project(path: 'hadoop-client-api', configuration: 'default')
if (isEclipse) {
/*
* Eclipse can't pick up the shadow dependency so we point it at *something*

View file

@ -1,16 +1,46 @@
apply plugin: 'elasticsearch.build'
apply plugin: 'com.gradleup.shadow'
apply plugin: 'elasticsearch.java'
sourceSets {
patcher
}
configurations {
thejar {
canBeResolved = true
}
}
dependencies {
implementation "org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}"
thejar("org.apache.hadoop:hadoop-client-api:${project.parent.versions.hadoop}") {
transitive = false
}
tasks.named('shadowJar').configure {
exclude 'org/apache/hadoop/util/ShutdownHookManager$*.class'
patcherImplementation 'org.ow2.asm:asm:9.7.1'
patcherImplementation 'org.ow2.asm:asm-tree:9.7.1'
}
['jarHell', 'thirdPartyAudit', 'forbiddenApisMain', 'splitPackagesAudit'].each {
tasks.named(it).configure {
enabled = false
def outputDir = layout.buildDirectory.dir("patched-classes")
def patchTask = tasks.register("patchClasses", JavaExec) {
inputs.files(configurations.thejar).withPathSensitivity(PathSensitivity.RELATIVE)
inputs.files(sourceSets.patcher.output).withPathSensitivity(PathSensitivity.RELATIVE)
outputs.dir(outputDir)
classpath = sourceSets.patcher.runtimeClasspath
mainClass = 'org.elasticsearch.hdfs.patch.HdfsClassPatcher'
doFirst {
args(configurations.thejar.singleFile, outputDir.get().asFile)
}
}
tasks.named('jar').configure {
dependsOn(configurations.thejar)
from(patchTask)
from({ project.zipTree(configurations.thejar.singleFile) }) {
eachFile {
if (outputDir.get().file(it.relativePath.pathString).asFile.exists()) {
it.exclude()
}
}
}
}
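The jar task above replaces the old shadow-jar approach: patched classes are copied in first via from(patchTask), and any entry of the upstream Hadoop jar that has a patched counterpart at the same relative path is excluded. A hedged standalone sketch of that selection rule (jar and directory paths are assumptions, not from the build):

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.jar.JarFile;

public class MergeRuleSketch {
    public static void main(String[] args) throws Exception {
        Path patchedRoot = Path.of("build/patched-classes");
        try (JarFile upstream = new JarFile("hadoop-client-api.jar")) {
            upstream.stream()
                // keep an original entry only when no patched replacement exists
                .filter(entry -> Files.notExists(patchedRoot.resolve(entry.getName())))
                .forEach(entry -> System.out.println("keep: " + entry.getName()));
        }
    }
}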

View file

@ -1,48 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.apache.hadoop.util;
import java.util.concurrent.TimeUnit;
/**
* A replacement for the ShutdownHookManager from hadoop.
*
* This class does not actually add a shutdown hook. Hadoop's shutdown hook
* manager does not fail gracefully when it lacks security manager permissions
* to add shutdown hooks. This implements the same api as the hadoop class, but
* with no-ops.
*/
public class ShutdownHookManager {
private static final ShutdownHookManager MGR = new ShutdownHookManager();
public static ShutdownHookManager get() {
return MGR;
}
private ShutdownHookManager() {}
public void addShutdownHook(Runnable shutdownHook, int priority) {}
public void addShutdownHook(Runnable shutdownHook, int priority, long timeout, TimeUnit unit) {}
public boolean removeShutdownHook(Runnable shutdownHook) {
return false;
}
public boolean hasShutdownHook(Runnable shutdownHook) {
return false;
}
public boolean isShutdownInProgress() {
return false;
}
public void clearShutdownHooks() {}
}

View file

@ -0,0 +1,55 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.hdfs.patch;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.function.Function;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
public class HdfsClassPatcher {
static final Map<String, Function<ClassWriter, ClassVisitor>> patchers = Map.of(
"org/apache/hadoop/util/ShutdownHookManager.class",
ShutdownHookManagerPatcher::new,
"org/apache/hadoop/util/Shell.class",
ShellPatcher::new
);
public static void main(String[] args) throws Exception {
String jarPath = args[0];
Path outputDir = Paths.get(args[1]);
try (JarFile jarFile = new JarFile(new File(jarPath))) {
for (var patcher : patchers.entrySet()) {
JarEntry jarEntry = jarFile.getJarEntry(patcher.getKey());
if (jarEntry == null) {
throw new IllegalArgumentException("path [" + patcher.getKey() + "] not found in [" + jarPath + "]");
}
byte[] classToPatch = jarFile.getInputStream(jarEntry).readAllBytes();
ClassReader classReader = new ClassReader(classToPatch);
ClassWriter classWriter = new ClassWriter(classReader, 0);
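// flags = 0: no automatic frame/maxs recomputation; the replacement method
// bodies reuse the original limits via MethodReplacement.visitMaxs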
classReader.accept(patcher.getValue().apply(classWriter), 0);
Path outputFile = outputDir.resolve(patcher.getKey());
Files.createDirectories(outputFile.getParent());
Files.write(outputFile, classWriter.toByteArray());
}
}
}
}

View file

@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.hdfs.patch;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
public class MethodReplacement extends MethodVisitor {
private final MethodVisitor delegate;
private final Runnable bodyWriter;
MethodReplacement(MethodVisitor delegate, Runnable bodyWriter) {
super(Opcodes.ASM9);
this.delegate = delegate;
this.bodyWriter = bodyWriter;
}
@Override
public void visitCode() {
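// emit only the replacement body; the original instructions are dropped
// because they are never forwarded to the delegate visitor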
// delegate.visitCode();
bodyWriter.run();
// delegate.visitEnd();
}
@Override
public void visitMaxs(int maxStack, int maxLocals) {
delegate.visitMaxs(maxStack, maxLocals);
}
}

View file

@ -0,0 +1,34 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.hdfs.patch;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
class ShellPatcher extends ClassVisitor {
ShellPatcher(ClassWriter classWriter) {
super(Opcodes.ASM9, classWriter);
}
@Override
public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
if (name.equals("isSetsidSupported")) {
return new MethodReplacement(mv, () -> {
mv.visitInsn(Opcodes.ICONST_0);
mv.visitInsn(Opcodes.IRETURN);
});
}
return mv;
}
}

View file

@ -0,0 +1,66 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.hdfs.patch;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
class ShutdownHookManagerPatcher extends ClassVisitor {
private static final String CLASSNAME = "org/apache/hadoop/util/ShutdownHookManager";
private static final Set<String> VOID_METHODS = Set.of("addShutdownHook", "clearShutdownHooks");
private static final Set<String> BOOLEAN_METHODS = Set.of("removeShutdownHook", "hasShutdownHook", "isShutdownInProgress");
ShutdownHookManagerPatcher(ClassWriter classWriter) {
super(Opcodes.ASM9, classWriter);
}
@Override
public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) {
MethodVisitor mv = super.visitMethod(access, name, descriptor, signature, exceptions);
if (VOID_METHODS.contains(name)) {
// make void methods noops
return new MethodReplacement(mv, () -> { mv.visitInsn(Opcodes.RETURN); });
} else if (BOOLEAN_METHODS.contains(name)) {
// make boolean methods always return false
return new MethodReplacement(mv, () -> {
mv.visitInsn(Opcodes.ICONST_0);
mv.visitInsn(Opcodes.IRETURN);
});
} else if (name.equals("<clinit>")) {
return new MethodReplacement(mv, () -> {
// just initialize the statics, don't actually get runtime to add shutdown hook
var classType = Type.getObjectType(CLASSNAME);
mv.visitTypeInsn(Opcodes.NEW, CLASSNAME);
mv.visitInsn(Opcodes.DUP);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, CLASSNAME, "<init>", "()V", false);
mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "MGR", classType.getDescriptor());
var timeUnitType = Type.getType(TimeUnit.class);
mv.visitFieldInsn(Opcodes.GETSTATIC, timeUnitType.getInternalName(), "SECONDS", timeUnitType.getDescriptor());
mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "TIME_UNIT_DEFAULT", timeUnitType.getDescriptor());
var executorServiceType = Type.getType(ExecutorService.class);
mv.visitInsn(Opcodes.ACONST_NULL);
mv.visitFieldInsn(Opcodes.PUTSTATIC, CLASSNAME, "EXECUTOR", executorServiceType.getDescriptor());
mv.visitInsn(Opcodes.RETURN);
});
}
return mv;
}
}
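Because <clinit> is rewritten to skip shutdown-hook registration entirely, the patched class can be loaded and exercised in isolation. A hedged smoke-test sketch (the class-file path and the harness are assumptions, not part of this change):

import java.nio.file.Files;
import java.nio.file.Path;

public class PatchSmokeTest {
    public static void main(String[] args) throws Exception {
        byte[] patched = Files.readAllBytes(
            Path.of("build/patched-classes/org/apache/hadoop/util/ShutdownHookManager.class"));
        // throwaway loader so the patched bytes can be defined without Hadoop on the classpath
        ClassLoader loader = new ClassLoader(PatchSmokeTest.class.getClassLoader()) {
            @Override
            protected Class<?> findClass(String name) {
                return defineClass(name, patched, 0, patched.length);
            }
        };
        Class<?> clazz = Class.forName("org.apache.hadoop.util.ShutdownHookManager", true, loader);
        Object mgr = clazz.getMethod("get").invoke(null);
        System.out.println("isShutdownInProgress -> "
            + clazz.getMethod("isShutdownInProgress").invoke(mgr)); // expect false
    }
}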

View file

@ -12,7 +12,6 @@ package org.elasticsearch.upgrades;
import com.carrotsearch.randomizedtesting.annotations.Name;
import org.apache.http.HttpHost;
import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.Build;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.client.Request;
@ -40,7 +39,6 @@ import static org.hamcrest.Matchers.equalTo;
* In 8.2 we also added the ability to filter fields by type and metadata, with some post-hoc filtering applied on
* the co-ordinating node if older nodes were included in the system
*/
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473")
public class FieldCapsIT extends AbstractRollingUpgradeTestCase {
public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) {

View file

@ -15,6 +15,7 @@ import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthActio
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
import org.elasticsearch.action.admin.indices.template.get.GetComponentTemplateAction;
import org.elasticsearch.action.support.CancellableActionTestPlugin;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.RefCountingListener;
@ -66,6 +67,10 @@ public class RestActionCancellationIT extends HttpSmokeTestCase {
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_cat/aliases"), GetAliasesAction.NAME);
}
public void testGetComponentTemplateCancellation() {
runRestActionCancellationTest(new Request(HttpGet.METHOD_NAME, "/_component_template"), GetComponentTemplateAction.NAME);
}
private void runRestActionCancellationTest(Request request, String actionName) {
final var node = usually() ? internalCluster().getRandomNodeName() : internalCluster().startCoordinatingOnlyNode(Settings.EMPTY);

View file

@ -1720,3 +1720,59 @@ setup:
- match: { docs.0.doc._source.foo: 3 }
- match: { docs.0.doc._source.bar: "some text value" }
- not_exists: docs.0.doc.error
---
"Test ignored_fields":
- skip:
features:
- headers
- allowed_warnings
- requires:
cluster_features: ["simulate.ignored.fields"]
reason: "ingest simulate ignored fields added in 8.18"
- do:
headers:
Content-Type: application/json
simulate.ingest:
index: nonexistent
body: >
{
"docs": [
{
"_index": "simulate-test",
"_id": "y9Es_JIBiw6_GgN-U0qy",
"_score": 1,
"_source": {
"abc": "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf"
}
}
],
"index_template_substitutions": {
"ind_temp": {
"index_patterns": ["simulate-test"],
"composed_of": ["simulate-test"]
}
},
"component_template_substitutions": {
"simulate-test": {
"template": {
"mappings": {
"dynamic": false,
"properties": {
"abc": {
"type": "keyword",
"ignore_above": 1
}
}
}
}
}
}
}
- length: { docs: 1 }
- match: { docs.0.doc._index: "simulate-test" }
- match: { docs.0.doc._source.abc: "sfdsfsfdsfsfdsfsfdsfsfdsfsfdsf" }
- match: { docs.0.doc.ignored_fields: [ {"field": "abc"} ] }
- not_exists: docs.0.doc.error

View file

@ -13,7 +13,7 @@ restResources {
}
// REST API specifications are published under the Apache 2.0 License
ext.projectLicenses.set(['The Apache Software License, Version 2.0': 'http://www.apache.org/licenses/LICENSE-2.0'])
ext.projectLicenses.set(['The Apache Software License, Version 2.0': providers.provider(() -> 'http://www.apache.org/licenses/LICENSE-2.0')])
licenseFile.set(rootProject.file('licenses/APACHE-LICENSE-2.0.txt'))
configurations {

View file

@ -28,9 +28,10 @@
"params":{
"master_timeout":{
"type":"time",
"description":"Explicit operation timeout for connection to master node"
"description":"Timeout for waiting for new cluster state in case it is blocked"
},
"local":{
"deprecated":true,
"type":"boolean",
"description":"Return local information, do not retrieve the state from master node (default: false)"
}

View file

@ -34,9 +34,10 @@
"params":{
"master_timeout":{
"type":"time",
"description":"Explicit operation timeout for connection to master node"
"description":"Timeout for waiting for new cluster state in case it is blocked"
},
"local":{
"deprecated":true,
"type":"boolean",
"description":"Return local information, do not retrieve the state from master node (default: false)"
},

View file

@ -171,3 +171,33 @@
- match: {component_templates.0.component_template.template.lifecycle.enabled: true}
- match: {component_templates.0.component_template.template.lifecycle.data_retention: "10d"}
- is_true: component_templates.0.component_template.template.lifecycle.rollover
---
"Deprecated local parameter":
- requires:
capabilities:
- method: GET
path: /_component_template
capabilities: ["local_param_deprecated"]
test_runner_features: ["capabilities", "warnings"]
reason: Deprecation was implemented with capability
- do:
cluster.get_component_template:
local: true
warnings:
- "the [?local] query parameter to this API has no effect, is now deprecated, and will be removed in a future version"
---
"Deprecated local parameter works in v8 compat mode":
- requires:
test_runner_features: ["headers"]
- do:
headers:
Content-Type: "application/vnd.elasticsearch+json;compatible-with=8"
Accept: "application/vnd.elasticsearch+json;compatible-with=8"
cluster.get_component_template:
local: true
- exists: component_templates

View file

@ -463,7 +463,7 @@ public class ComponentTemplatesFileSettingsIT extends ESIntegTestCase {
final var componentResponse = client().execute(
GetComponentTemplateAction.INSTANCE,
new GetComponentTemplateAction.Request("other*")
new GetComponentTemplateAction.Request(TEST_REQUEST_TIMEOUT, "other*")
).get();
assertTrue(componentResponse.getComponentTemplates().isEmpty());

View file

@ -146,6 +146,7 @@ public class TransportVersions {
public static final TransportVersion NODE_VERSION_INFORMATION_WITH_MIN_READ_ONLY_INDEX_VERSION = def(8_810_00_0);
public static final TransportVersion ERROR_TRACE_IN_TRANSPORT_HEADER = def(8_811_00_0);
public static final TransportVersion FAILURE_STORE_ENABLED_BY_CLUSTER_SETTING = def(8_812_00_0);
public static final TransportVersion SIMULATE_IGNORED_FIELDS = def(8_813_00_0);
/*
* WARNING: DO NOT MERGE INTO MAIN!

View file

@ -14,12 +14,17 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.admin.indices.rollover.RolloverConfiguration;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.action.support.local.LocalClusterStateRequest;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.DataStreamGlobalRetention;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV10;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
@ -43,22 +48,23 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA
/**
* Request to retrieve one or more component templates
*/
public static class Request extends MasterNodeReadRequest<Request> {
public static class Request extends LocalClusterStateRequest {
@Nullable
private String name;
private boolean includeDefaults;
public Request() {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT);
}
public Request(String name) {
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT);
public Request(TimeValue masterTimeout, String name) {
super(masterTimeout);
this.name = name;
this.includeDefaults = false;
}
/**
* NB prior to 9.0 get-component was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until
* we no longer need to support calling this action remotely.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
public Request(StreamInput in) throws IOException {
super(in);
name = in.readOptionalString();
@ -70,17 +76,13 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeOptionalString(name);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
out.writeBoolean(includeDefaults);
}
public ActionRequestValidationException validate() {
return null;
}
@Override
public ActionRequestValidationException validate() {
return null;
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
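// the request now runs as a CancellableTask so that, e.g., a closed REST
// channel can abort the local work (see the new RestActionCancellationIT test)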
return new CancellableTask(id, type, action, "", parentTaskId, headers);
}
/**
@ -123,19 +125,6 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA
@Nullable
private final RolloverConfiguration rolloverConfiguration;
public Response(StreamInput in) throws IOException {
super(in);
componentTemplates = in.readMap(ComponentTemplate::new);
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) {
rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new);
} else {
rolloverConfiguration = null;
}
if (in.getTransportVersion().between(TransportVersions.V_8_14_0, TransportVersions.V_8_16_0)) {
in.readOptionalWriteable(DataStreamGlobalRetention::read);
}
}
/**
* Please use {@link GetComponentTemplateAction.Response#Response(Map)}
*/
@ -183,6 +172,11 @@ public class GetComponentTemplateAction extends ActionType<GetComponentTemplateA
return null;
}
/**
* NB prior to 9.0 get-component was a TransportMasterNodeReadAction so for BwC we must remain able to write these responses until
* we no longer need to support calling this action remotely.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeMap(componentTemplates, StreamOutput::writeWriteable);

View file

@ -12,55 +12,65 @@ package org.elasticsearch.action.admin.indices.template.get;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeReadProjectAction;
import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.local.TransportLocalClusterStateAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProjectState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.ComponentTemplate;
import org.elasticsearch.cluster.metadata.DataStreamLifecycle;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.project.ProjectResolver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.core.UpdateForV10;
import org.elasticsearch.injection.guice.Inject;
import org.elasticsearch.tasks.CancellableTask;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.HashMap;
import java.util.Map;
public class TransportGetComponentTemplateAction extends TransportMasterNodeReadProjectAction<
public class TransportGetComponentTemplateAction extends TransportLocalClusterStateAction<
GetComponentTemplateAction.Request,
GetComponentTemplateAction.Response> {
private final ClusterSettings clusterSettings;
private final ProjectResolver projectResolver;
/**
* NB prior to 9.0 this was a TransportMasterNodeReadAction so for BwC it must be registered with the TransportService until
* we no longer need to support calling this action remotely.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DATA_MANAGEMENT)
@SuppressWarnings("this-escape")
@Inject
public TransportGetComponentTemplateAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
IndexNameExpressionResolver indexNameExpressionResolver,
ProjectResolver projectResolver
) {
super(
GetComponentTemplateAction.NAME,
transportService,
clusterService,
threadPool,
actionFilters,
GetComponentTemplateAction.Request::new,
projectResolver,
indexNameExpressionResolver,
GetComponentTemplateAction.Response::new,
transportService.getTaskManager(),
clusterService,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
clusterSettings = clusterService.getClusterSettings();
this.projectResolver = projectResolver;
transportService.registerRequestHandler(
actionName,
executor,
false,
true,
GetComponentTemplateAction.Request::new,
(request, channel, task) -> executeDirect(task, request, new ChannelActionListener<>(channel))
);
}
@Override
@ -69,13 +79,14 @@ public class TransportGetComponentTemplateAction extends TransportMasterNodeRead
}
@Override
protected void masterOperation(
protected void localClusterStateOperation(
Task task,
GetComponentTemplateAction.Request request,
ProjectState projectState,
ClusterState clusterState,
ActionListener<GetComponentTemplateAction.Response> listener
) {
Map<String, ComponentTemplate> allTemplates = projectState.metadata().componentTemplates();
final var cancellableTask = (CancellableTask) task;
Map<String, ComponentTemplate> allTemplates = projectResolver.getProjectMetadata(clusterState).componentTemplates();
Map<String, ComponentTemplate> results;
// If we did not ask for a specific name, then we return all templates
@ -97,6 +108,7 @@ public class TransportGetComponentTemplateAction extends TransportMasterNodeRead
}
}
cancellableTask.ensureNotCancelled();
if (request.includeDefaults()) {
listener.onResponse(
new GetComponentTemplateAction.Response(

View file

@ -15,6 +15,7 @@ import org.elasticsearch.features.NodeFeature;
import java.util.Set;
import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS;
import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_IGNORED_FIELDS;
import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS;
import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_ADDITION;
import static org.elasticsearch.action.bulk.TransportSimulateBulkAction.SIMULATE_MAPPING_VALIDATION;
@ -29,7 +30,8 @@ public class BulkFeatures implements FeatureSpecification {
SIMULATE_COMPONENT_TEMPLATE_SUBSTITUTIONS,
SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS,
SIMULATE_MAPPING_ADDITION,
SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING
SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING,
SIMULATE_IGNORED_FIELDS
);
}
}

View file

@ -9,6 +9,8 @@
package org.elasticsearch.action.bulk;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.indices.template.post.TransportSimulateIndexTemplateAction;
@ -35,6 +37,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.index.IndexSettingProvider;
import org.elasticsearch.index.IndexSettingProviders;
@ -42,6 +45,8 @@ import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexingPressure;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.seqno.SequenceNumbers;
@ -62,6 +67,7 @@ import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -87,6 +93,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
public static final NodeFeature SIMULATE_INDEX_TEMPLATE_SUBSTITUTIONS = new NodeFeature("simulate.index.template.substitutions");
public static final NodeFeature SIMULATE_MAPPING_ADDITION = new NodeFeature("simulate.mapping.addition");
public static final NodeFeature SIMULATE_SUPPORT_NON_TEMPLATE_MAPPING = new NodeFeature("simulate.support.non.template.mapping");
public static final NodeFeature SIMULATE_IGNORED_FIELDS = new NodeFeature("simulate.ignored.fields");
private final IndicesService indicesService;
private final NamedXContentRegistry xContentRegistry;
private final Set<IndexSettingProvider> indexSettingProviders;
@ -141,12 +148,13 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
DocWriteRequest<?> docRequest = bulkRequest.requests.get(i);
assert docRequest instanceof IndexRequest : "TransportSimulateBulkAction should only ever be called with IndexRequests";
IndexRequest request = (IndexRequest) docRequest;
Exception mappingValidationException = validateMappings(
Tuple<Collection<String>, Exception> validationResult = validateMappings(
componentTemplateSubstitutions,
indexTemplateSubstitutions,
mappingAddition,
request
);
Exception mappingValidationException = validationResult.v2();
responses.set(
i,
BulkItemResponse.success(
@ -159,6 +167,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
request.source(),
request.getContentType(),
request.getExecutedPipelines(),
validationResult.v1(),
mappingValidationException
)
)
@ -172,11 +181,12 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
/**
* This creates a temporary index with the mappings of the index in the request, and then attempts to index the source from the request
* into it. If there is a mapping exception, that exception is returned. On success the returned exception is null.
* @parem componentTemplateSubstitutions The component template definitions to use in place of existing ones for validation
* @param componentTemplateSubstitutions The component template definitions to use in place of existing ones for validation
* @param request The IndexRequest whose source will be validated against the mapping (if it exists) of its index
* @return a mapping exception if the source does not match the mappings, otherwise null
* @return a Tuple containing: (1) in v1 the names of any fields that would be ignored upon indexing and (2) in v2 the mapping
* exception if the source does not match the mappings, otherwise null
*/
private Exception validateMappings(
private Tuple<Collection<String>, Exception> validateMappings(
Map<String, ComponentTemplate> componentTemplateSubstitutions,
Map<String, ComposableIndexTemplate> indexTemplateSubstitutions,
Map<String, Object> mappingAddition,
@ -194,6 +204,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
ClusterState state = clusterService.state();
ProjectMetadata project = state.metadata().getProject();
Exception mappingValidationException = null;
Collection<String> ignoredFields = List.of();
IndexAbstraction indexAbstraction = project.getIndicesLookup().get(request.index());
try {
if (indexAbstraction != null
@ -280,7 +291,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
);
CompressedXContent mappings = template.mappings();
CompressedXContent mergedMappings = mergeMappings(mappings, mappingAddition);
validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse);
ignoredFields = validateUpdatedMappings(mappings, mergedMappings, request, sourceToParse);
} else {
List<IndexTemplateMetadata> matchingTemplates = findV1Templates(
simulatedState.metadata().getProject(),
@ -298,7 +309,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
xContentRegistry
);
final CompressedXContent combinedMappings = mergeMappings(new CompressedXContent(mappingsMap), mappingAddition);
validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
} else if (indexAbstraction != null && mappingAddition.isEmpty() == false) {
/*
* The index matched no templates of any kind, including the substitutions. But it might have a mapping. So we
@ -311,7 +322,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
.mapping();
CompressedXContent currentIndexCompressedXContent = mappingFromIndex == null ? null : mappingFromIndex.source();
CompressedXContent combinedMappings = mergeMappings(currentIndexCompressedXContent, mappingAddition);
validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
} else {
/*
* The index matched no templates and had no mapping of its own. If there were component template substitutions
@ -319,27 +330,28 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
* and validate.
*/
final CompressedXContent combinedMappings = mergeMappings(null, mappingAddition);
validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
ignoredFields = validateUpdatedMappings(null, combinedMappings, request, sourceToParse);
}
}
}
} catch (Exception e) {
mappingValidationException = e;
}
return mappingValidationException;
return Tuple.tuple(ignoredFields, mappingValidationException);
}
/*
* Validates that when updatedMappings are applied
* Validates the source of the request once updatedMappings are applied. If any fields would be ignored while indexing, then those field names are returned.
* Otherwise the returned Collection is empty.
*/
private void validateUpdatedMappings(
private Collection<String> validateUpdatedMappings(
@Nullable CompressedXContent originalMappings,
@Nullable CompressedXContent updatedMappings,
IndexRequest request,
SourceToParse sourceToParse
) throws IOException {
if (updatedMappings == null) {
return; // no validation to do
return List.of(); // no validation to do
}
Settings dummySettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
@ -356,7 +368,7 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
.settings(dummySettings)
.putMapping(new MappingMetadata(updatedMappings))
.build();
indicesService.withTempIndexService(originalIndexMetadata, indexService -> {
Engine.Index result = indicesService.withTempIndexService(originalIndexMetadata, indexService -> {
indexService.mapperService().merge(updatedIndexMetadata, MapperService.MergeReason.MAPPING_UPDATE);
return IndexShard.prepareIndex(
indexService.mapperService(),
@ -373,6 +385,24 @@ public class TransportSimulateBulkAction extends TransportAbstractBulkAction {
0
);
});
final Collection<String> ignoredFields;
if (result == null) {
ignoredFields = List.of();
} else {
List<LuceneDocument> luceneDocuments = result.parsedDoc().docs();
assert luceneDocuments == null || luceneDocuments.size() == 1 : "Expected a single lucene document from index attempt";
if (luceneDocuments != null && luceneDocuments.size() == 1) {
ignoredFields = luceneDocuments.getFirst()
.getFields()
.stream()
.filter(field -> field.name().equals(IgnoredFieldMapper.NAME) && field instanceof StringField)
.map(IndexableField::stringValue)
.toList();
} else {
ignoredFields = List.of();
}
}
return ignoredFields;
}
private static CompressedXContent mergeMappings(@Nullable CompressedXContent originalMapping, Map<String, Object> mappingAddition)
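For orientation, a minimal sketch of how the caller above unpacks the new Tuple result (names exactly as in this diff; v1/v2 are the standard Tuple accessors):

    Tuple<Collection<String>, Exception> validationResult = validateMappings(
        componentTemplateSubstitutions,
        indexTemplateSubstitutions,
        mappingAddition,
        request
    );
    Collection<String> ignoredFields = validationResult.v1();     // empty when nothing was ignored
    Exception mappingValidationException = validationResult.v2(); // null when the source matched the mappings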

View file

@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
/**
@ -34,6 +35,7 @@ import java.util.List;
public class SimulateIndexResponse extends IndexResponse {
private final BytesReference source;
private final XContentType sourceXContentType;
private final Collection<String> ignoredFields;
private final Exception exception;
@SuppressWarnings("this-escape")
@ -47,6 +49,11 @@ public class SimulateIndexResponse extends IndexResponse {
} else {
this.exception = null;
}
if (in.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_IGNORED_FIELDS)) {
this.ignoredFields = in.readStringCollectionAsList();
} else {
this.ignoredFields = List.of();
}
}
@SuppressWarnings("this-escape")
@ -57,6 +64,7 @@ public class SimulateIndexResponse extends IndexResponse {
BytesReference source,
XContentType sourceXContentType,
List<String> pipelines,
Collection<String> ignoredFields,
@Nullable Exception exception
) {
// We don't actually care about most of the IndexResponse fields:
@ -73,6 +81,7 @@ public class SimulateIndexResponse extends IndexResponse {
this.source = source;
this.sourceXContentType = sourceXContentType;
setShardInfo(ShardInfo.EMPTY);
this.ignoredFields = ignoredFields;
this.exception = exception;
}
@ -84,6 +93,16 @@ public class SimulateIndexResponse extends IndexResponse {
builder.field("_source", XContentHelper.convertToMap(source, false, sourceXContentType).v2());
assert executedPipelines != null : "executedPipelines is null when it shouldn't be - we always list pipelines in simulate mode";
builder.array("executed_pipelines", executedPipelines.toArray());
if (ignoredFields.isEmpty() == false) {
builder.startArray("ignored_fields");
for (String ignoredField : ignoredFields) {
builder.startObject();
builder.field("field", ignoredField);
builder.endObject();
}
builder.endArray();
}
if (exception != null) {
builder.startObject("error");
ElasticsearchException.generateThrowableXContent(builder, params, exception);
@ -105,6 +124,9 @@ public class SimulateIndexResponse extends IndexResponse {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) {
out.writeException(exception);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.SIMULATE_IGNORED_FIELDS)) {
out.writeStringCollection(ignoredFields);
}
}
public Exception getException() {
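For reference, the toXContent addition above renders each ignored field name as a one-key object, matching the test expectations later in this commit:

    "ignored_fields": [{"field": "abc"}, {"field": "def"}]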

View file

@ -73,15 +73,21 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
// on coordinator node. so get the status from cause instead of returning SERVICE_UNAVAILABLE blindly
return getCause() == null ? RestStatus.SERVICE_UNAVAILABLE : ExceptionsHelper.status(getCause());
}
RestStatus status = shardFailures[0].status();
if (shardFailures.length > 1) {
for (int i = 1; i < shardFailures.length; i++) {
if (shardFailures[i].status().getStatus() >= RestStatus.INTERNAL_SERVER_ERROR.getStatus()) {
status = shardFailures[i].status();
RestStatus status = null;
for (ShardSearchFailure shardFailure : shardFailures) {
RestStatus shardStatus = shardFailure.status();
int statusCode = shardStatus.getStatus();
// Return early for an error that can be retried (502-504).
// These currently take precedence over other status codes.
if (statusCode >= 502 && statusCode <= 504) {
return shardStatus;
} else if (statusCode >= 500) {
status = shardStatus;
}
}
}
return status;
return status == null ? shardFailures[0].status() : status;
}
public ShardSearchFailure[] shardFailures() {
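A quick worked example of the new precedence rule, mirroring the tests later in this commit (failure1/failure2 are illustrative shard failures mapping to 500 and 502 respectively):

    SearchPhaseExecutionException ex = new SearchPhaseExecutionException(
        "search",
        "all shards failed",
        new ShardSearchFailure[] { failure1, failure2 }
    );
    // 502-504 are returned eagerly; other 5xx codes win over non-5xx;
    // with no 5xx at all, the first failure's status is used
    assert ex.status() == RestStatus.BAD_GATEWAY;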

View file

@ -9,10 +9,13 @@
package org.elasticsearch.action.support.local;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV10;
import java.io.IOException;
import java.util.Objects;
@ -32,6 +35,20 @@ public abstract class LocalClusterStateRequest extends ActionRequest {
this.masterTimeout = Objects.requireNonNull(masterTimeout);
}
/**
* This constructor exists solely for BwC purposes. It should exclusively be used by requests that used to extend
* {@link org.elasticsearch.action.support.master.MasterNodeReadRequest} and still need to be able to deserialize incoming requests.
*/
@UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION)
protected LocalClusterStateRequest(StreamInput in) throws IOException {
super(in);
masterTimeout = in.readTimeValue();
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) {
in.readVLong();
}
in.readBoolean();
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
TransportAction.localOnly();

View file

@ -122,7 +122,12 @@ public record UnifiedCompletionRequest(
);
static {
PARSER.declareField(constructorArg(), (p, c) -> parseContent(p), new ParseField("content"), ObjectParser.ValueType.VALUE_ARRAY);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> parseContent(p),
new ParseField("content"),
ObjectParser.ValueType.VALUE_ARRAY
);
PARSER.declareString(constructorArg(), new ParseField("role"));
PARSER.declareString(optionalConstructorArg(), new ParseField("name"));
PARSER.declareString(optionalConstructorArg(), new ParseField("tool_call_id"));
@ -143,7 +148,7 @@ public record UnifiedCompletionRequest(
public Message(StreamInput in) throws IOException {
this(
in.readNamedWriteable(Content.class),
in.readOptionalNamedWriteable(Content.class),
in.readString(),
in.readOptionalString(),
in.readOptionalString(),
@ -153,7 +158,7 @@ public record UnifiedCompletionRequest(
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(content);
out.writeOptionalNamedWriteable(content);
out.writeString(role);
out.writeOptionalString(name);
out.writeOptionalString(toolCallId);
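With content now optional on both the parser and the wire format, a message that omits it parses with a null Content; an illustrative payload:

    { "role": "assistant" }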

View file

@ -9,11 +9,16 @@
package org.elasticsearch.rest;
import org.elasticsearch.action.support.local.TransportLocalClusterStateAction;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.core.Booleans;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.core.UpdateForV10;
import java.net.URI;
import java.nio.charset.Charset;
@ -323,4 +328,23 @@ public class RestUtils {
assert restRequest != null;
return restRequest.paramAsTime(REST_TIMEOUT_PARAM, null);
}
// Remove the BWC support for the deprecated ?local parameter.
// NOTE: ensure each usage of this method has been deprecated for long enough to remove it.
@UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION)
public static void consumeDeprecatedLocalParameter(RestRequest request) {
if (request.hasParam("local") == false) {
return;
}
// Consume this param just for validation when in BWC mode.
final var local = request.paramAsBoolean("local", false);
if (request.getRestApiVersion() != RestApiVersion.V_8) {
DeprecationLogger.getLogger(TransportLocalClusterStateAction.class)
.critical(
DeprecationCategory.API,
"TransportLocalClusterStateAction-local-parameter",
"the [?local] query parameter to this API has no effect, is now deprecated, and will be removed in a future version"
);
}
}
}

View file

@ -14,8 +14,10 @@ import org.elasticsearch.client.internal.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestUtils;
import org.elasticsearch.rest.Scope;
import org.elasticsearch.rest.ServerlessScope;
import org.elasticsearch.rest.action.RestCancellableNodeClient;
import org.elasticsearch.rest.action.RestToXContentListener;
import java.io.IOException;
@ -26,11 +28,12 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.HEAD;
import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
import static org.elasticsearch.rest.RestStatus.OK;
import static org.elasticsearch.rest.RestUtils.getMasterNodeTimeout;
@ServerlessScope(Scope.PUBLIC)
public class RestGetComponentTemplateAction extends BaseRestHandler {
private static final Set<String> SUPPORTED_CAPABILITIES = Set.of("local_param_deprecated");
@Override
public List<Route> routes() {
return List.of(
@ -47,18 +50,23 @@ public class RestGetComponentTemplateAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
final GetComponentTemplateAction.Request getRequest = new GetComponentTemplateAction.Request(request.param("name"));
final GetComponentTemplateAction.Request getRequest = new GetComponentTemplateAction.Request(
RestUtils.getMasterNodeTimeout(request),
request.param("name")
);
getRequest.includeDefaults(request.paramAsBoolean("include_defaults", false));
getRequest.local(request.paramAsBoolean("local", getRequest.local()));
getRequest.masterNodeTimeout(getMasterNodeTimeout(request));
RestUtils.consumeDeprecatedLocalParameter(request);
final boolean implicitAll = getRequest.name() == null;
return channel -> client.execute(GetComponentTemplateAction.INSTANCE, getRequest, new RestToXContentListener<>(channel, r -> {
return channel -> new RestCancellableNodeClient(client, request.getHttpChannel()).execute(
GetComponentTemplateAction.INSTANCE,
getRequest,
new RestToXContentListener<>(channel, r -> {
final boolean templateExists = r.getComponentTemplates().isEmpty() == false;
return (templateExists || implicitAll) ? OK : NOT_FOUND;
}));
})
);
}
@Override
@ -66,4 +74,8 @@ public class RestGetComponentTemplateAction extends BaseRestHandler {
return Settings.FORMAT_PARAMS;
}
@Override
public Set<String> supportedCapabilities() {
return SUPPORTED_CAPABILITIES;
}
}
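A note on the client change above: RestCancellableNodeClient ties the spawned task to the HTTP channel, so the get-component-template task is cancelled if the client disconnects before the response is sent. The pattern, with listener standing in for the RestToXContentListener built above:

    new RestCancellableNodeClient(client, request.getHttpChannel())
        .execute(GetComponentTemplateAction.INSTANCE, getRequest, listener);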

View file

@ -19,16 +19,12 @@ import org.elasticsearch.cluster.metadata.ResettableValue;
import org.elasticsearch.cluster.metadata.Template;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomAliases;
@ -37,30 +33,7 @@ import static org.elasticsearch.cluster.metadata.ComponentTemplateTests.randomSe
import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS;
import static org.hamcrest.Matchers.containsString;
public class GetComponentTemplateResponseTests extends AbstractWireSerializingTestCase<GetComponentTemplateAction.Response> {
@Override
protected Writeable.Reader<GetComponentTemplateAction.Response> instanceReader() {
return GetComponentTemplateAction.Response::new;
}
@Override
protected GetComponentTemplateAction.Response createTestInstance() {
return new GetComponentTemplateAction.Response(
randomBoolean() ? Map.of() : randomTemplates(),
RolloverConfigurationTests.randomRolloverConditions()
);
}
@Override
protected GetComponentTemplateAction.Response mutateInstance(GetComponentTemplateAction.Response instance) {
var templates = instance.getComponentTemplates();
var rolloverConditions = instance.getRolloverConfiguration();
switch (randomInt(1)) {
case 0 -> templates = templates == null ? randomTemplates() : null;
case 1 -> rolloverConditions = randomValueOtherThan(rolloverConditions, RolloverConfigurationTests::randomRolloverConditions);
}
return new GetComponentTemplateAction.Response(templates, rolloverConditions);
}
public class GetComponentTemplateResponseTests extends ESTestCase {
public void testXContentSerializationWithRolloverAndEffectiveRetention() throws IOException {
Settings settings = null;
@ -102,17 +75,4 @@ public class GetComponentTemplateResponseTests extends AbstractWireSerializingTe
}
}
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(IndicesModule.getNamedWriteables());
}
private static Map<String, ComponentTemplate> randomTemplates() {
Map<String, ComponentTemplate> templates = new HashMap<>();
for (int i = 0; i < randomIntBetween(1, 4); i++) {
templates.put(randomAlphaOfLength(4), ComponentTemplateTests.randomInstance());
}
return templates;
}
}

View file

@ -48,6 +48,7 @@ public class SimulateIndexResponseTests extends ESTestCase {
sourceBytes,
XContentType.JSON,
pipelines,
List.of(),
null
);
@ -79,6 +80,7 @@ public class SimulateIndexResponseTests extends ESTestCase {
sourceBytes,
XContentType.JSON,
pipelines,
List.of(),
new ElasticsearchException("Some failure")
);
@ -103,6 +105,39 @@ public class SimulateIndexResponseTests extends ESTestCase {
),
Strings.toString(indexResponseWithException)
);
SimulateIndexResponse indexResponseWithIgnoredFields = new SimulateIndexResponse(
id,
index,
version,
sourceBytes,
XContentType.JSON,
pipelines,
List.of("abc", "def"),
null
);
assertEquals(
XContentHelper.stripWhitespace(
Strings.format(
"""
{
"_id": "%s",
"_index": "%s",
"_version": %d,
"_source": %s,
"executed_pipelines": [%s],
"ignored_fields": [{"field": "abc"}, {"field": "def"}]
}""",
id,
index,
version,
source,
pipelines.stream().map(pipeline -> "\"" + pipeline + "\"").collect(Collectors.joining(","))
)
),
Strings.toString(indexResponseWithIgnoredFields)
);
}
public void testSerialization() throws IOException {
@ -135,6 +170,7 @@ public class SimulateIndexResponseTests extends ESTestCase {
sourceBytes,
xContentType,
pipelines,
randomList(0, 20, () -> randomAlphaOfLength(15)),
randomBoolean() ? null : new ElasticsearchException("failed")
);
}

View file

@ -22,6 +22,7 @@ import org.elasticsearch.indices.InvalidIndexTemplateException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.transport.NodeDisconnectedException;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xcontent.XContentParser;
@ -31,6 +32,7 @@ import java.io.IOException;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
public class SearchPhaseExecutionExceptionTests extends ESTestCase {
@ -168,4 +170,57 @@ public class SearchPhaseExecutionExceptionTests extends ESTestCase {
assertEquals(actual.status(), RestStatus.BAD_REQUEST);
}
public void testOnlyWithCodesThatDoNotRequirePrecedence() {
int pickedIndex = randomIntBetween(0, 1);
// Pick one of these exceptions randomly.
var searchExceptions = new ElasticsearchException[] {
new ElasticsearchException("simulated"),
new NodeDisconnectedException(null, "unused message", "unused action", null) };
// Status codes that map to searchExceptions.
var expectedStatusCodes = new RestStatus[] { RestStatus.INTERNAL_SERVER_ERROR, RestStatus.BAD_GATEWAY };
ShardSearchFailure shardFailure1 = new ShardSearchFailure(
searchExceptions[pickedIndex],
new SearchShardTarget("nodeID", new ShardId("someIndex", "someUUID", 1), null)
);
ShardSearchFailure shardFailure2 = new ShardSearchFailure(
searchExceptions[pickedIndex],
new SearchShardTarget("nodeID", new ShardId("someIndex", "someUUID", 2), null)
);
SearchPhaseExecutionException ex = new SearchPhaseExecutionException(
"search",
"all shards failed",
new ShardSearchFailure[] { shardFailure1, shardFailure2 }
);
assertThat(ex.status(), is(expectedStatusCodes[pickedIndex]));
}
public void testWithRetriableCodesThatTakePrecedence() {
// Maps to a 500.
ShardSearchFailure shardFailure1 = new ShardSearchFailure(
new ElasticsearchException("simulated"),
new SearchShardTarget("nodeID", new ShardId("someIndex", "someUUID", 1), null)
);
// Maps to a 502.
ShardSearchFailure shardFailure2 = new ShardSearchFailure(
new NodeDisconnectedException(null, "unused message", "unused action", null),
new SearchShardTarget("nodeID", new ShardId("someIndex", "someUUID", 2), null)
);
SearchPhaseExecutionException ex = new SearchPhaseExecutionException(
"search",
"all shards failed",
new ShardSearchFailure[] { shardFailure1, shardFailure2 }
);
// The 502 takes precedence over 500.
assertThat(ex.status(), is(RestStatus.BAD_GATEWAY));
}
}

View file

@ -175,6 +175,14 @@ public class RestSimulateIngestActionTests extends ESTestCase {
"executed_pipelines" : [
"pipeline1",
"pipeline2"
],
"ignored_fields" : [
{
"field" : "abc"
},
{
"field" : "def"
}
]
}
},
@ -199,6 +207,14 @@ public class RestSimulateIngestActionTests extends ESTestCase {
"executed_pipelines" : [
"pipeline1",
"pipeline2"
],
"ignored_fields" : [
{
"field" : "abc"
},
{
"field" : "def"
}
]
}
}
@ -228,6 +244,7 @@ public class RestSimulateIngestActionTests extends ESTestCase {
BytesReference.fromByteBuffers(sourceByteBuffer),
XContentType.JSON,
List.of("pipeline1", "pipeline2"),
List.of("abc", "def"),
null
)
);

View file

@ -120,7 +120,7 @@ public abstract class TestCluster {
);
final SubscribableListener<GetComponentTemplateAction.Response> getComponentTemplates = SubscribableListener.newForked(
l -> client().execute(GetComponentTemplateAction.INSTANCE, new GetComponentTemplateAction.Request("*"), l)
l -> client().execute(GetComponentTemplateAction.INSTANCE, new GetComponentTemplateAction.Request(TEST_REQUEST_TIMEOUT, "*"), l)
);
SubscribableListener

View file

@ -852,8 +852,7 @@ public abstract class ESRestTestCase extends ESTestCase {
}
private void wipeCluster() throws Exception {
logger.info("Waiting for all cluster updates up to this moment to be processed");
assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid")));
waitForClusterUpdates();
// Cleanup rollup before deleting indices. A rollup job might have bulks in-flight,
// so we need to fully shut them down first otherwise a job might stall waiting
@ -991,6 +990,38 @@ public abstract class ESRestTestCase extends ESTestCase {
deleteAllNodeShutdownMetadata();
}
private void waitForClusterUpdates() throws Exception {
logger.info("Waiting for all cluster updates up to this moment to be processed");
try {
assertOK(adminClient().performRequest(new Request("GET", "_cluster/health?wait_for_events=languid")));
} catch (ResponseException e) {
if (e.getResponse().getStatusLine().getStatusCode() == HttpStatus.SC_REQUEST_TIMEOUT) {
final var pendingTasks = getPendingClusterStateTasks();
if (pendingTasks != null) {
logger.error("Timed out waiting for cluster updates to be processed, {}", pendingTasks);
}
}
throw e;
}
}
private static String getPendingClusterStateTasks() {
try {
Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks"));
List<?> tasks = (List<?>) entityAsMap(response).get("tasks");
if (false == tasks.isEmpty()) {
StringBuilder message = new StringBuilder("there are still running tasks:");
for (Object task : tasks) {
message.append('\n').append(task.toString());
}
return message.toString();
}
} catch (IOException e) {
fail(e, "Failed to retrieve pending tasks in the cluster during cleanup");
}
return null;
}
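For context, the diagnosis flow the two new helpers implement (endpoints as used in the code above):

    new Request("GET", "_cluster/health?wait_for_events=languid"); // waits until every queued cluster-state
                                                                   // task, down to LANGUID priority, is done
    new Request("GET", "/_cluster/pending_tasks");                 // on a 408, fetch and log the "tasks"
                                                                   // array before rethrowing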
/**
* This method checks whether ILM policies or templates get recreated after they have been deleted. If so, we are probably deleting
* them unnecessarily, potentially causing test performance problems. This could happen for example if someone adds a new standard ILM
@ -1461,18 +1492,9 @@ public abstract class ESRestTestCase extends ESTestCase {
*/
private static void waitForClusterStateUpdatesToFinish() throws Exception {
assertBusy(() -> {
try {
Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks"));
List<?> tasks = (List<?>) entityAsMap(response).get("tasks");
if (false == tasks.isEmpty()) {
StringBuilder message = new StringBuilder("there are still running tasks:");
for (Object task : tasks) {
message.append('\n').append(task.toString());
}
fail(message.toString());
}
} catch (IOException e) {
fail("cannot get cluster's pending tasks: " + e.getMessage());
final var pendingTasks = getPendingClusterStateTasks();
if (pendingTasks != null) {
fail(pendingTasks);
}
}, 30, TimeUnit.SECONDS);
}

View file

@ -7,7 +7,7 @@ policies:
- .github/updatecli/values.d/scm.yml
- .github/updatecli/values.d/ironbank.yml
- name: Update Updatecli policies
policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.6.0@sha256:6bd6999620674b2fbb1d374f7a1a5e9740d042667f0592900b44259f3e1ae98f
policy: ghcr.io/updatecli/policies/autodiscovery/updatecli:0.8.0@sha256:99e9e61b501575c2c176c39f2275998d198b590a3f6b1fe829f7315f8d457e7f
values:
- .github/updatecli/values.d/scm.yml
- .github/updatecli/values.d/updatecli-compose.yml

View file

@ -36,7 +36,7 @@ subprojects {
}
project.pluginManager.withPlugin("elasticsearch.licensing") {
ext.projectLicenses.set(['Elastic License 2.0': ext.elasticLicenseUrl.get()])
ext.projectLicenses.set(['Elastic License 2.0': ext.elasticLicenseUrl])
}
project.pluginManager.withPlugin("elasticsearch.build") {

View file

@ -59,6 +59,7 @@ import org.elasticsearch.index.IndexMode;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TimeSeriesParams;
@ -724,6 +725,9 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc
if (mapping.get("format") != null) {
builder.field("format", mapping.get("format"));
}
if (mapping.get("ignore_malformed") != null) {
builder.field("ignore_malformed", mapping.get("ignore_malformed"));
}
}
} catch (IOException e) {
throw new ElasticsearchException("Unable to create timestamp field mapping for field [" + timestampField + "]", e);
@ -898,6 +902,12 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc
sourceIndexMetadata.getSettings().get(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey())
);
}
if (sourceIndexMetadata.getSettings().hasValue(FieldMapper.IGNORE_MALFORMED_SETTING.getKey())) {
builder.put(
FieldMapper.IGNORE_MALFORMED_SETTING.getKey(),
sourceIndexMetadata.getSettings().get(FieldMapper.IGNORE_MALFORMED_SETTING.getKey())
);
}
CreateIndexClusterStateUpdateRequest createIndexClusterStateUpdateRequest = new CreateIndexClusterStateUpdateRequest(
"downsample",

View file

@ -49,6 +49,7 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
import org.elasticsearch.index.mapper.TimeSeriesParams;
@ -201,14 +202,19 @@ public class DownsampleActionSingleNodeTests extends ESSingleNodeTestCase {
IndexSettings.TIME_SERIES_START_TIME.getKey(),
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(Instant.ofEpochMilli(startTime).toEpochMilli())
)
.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z");
.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2106-01-08T23:40:53.384Z")
.put(FieldMapper.IGNORE_MALFORMED_SETTING.getKey(), randomBoolean());
if (randomBoolean()) {
settings.put(IndexMetadata.SETTING_INDEX_HIDDEN, randomBoolean());
}
XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties");
mapping.startObject(FIELD_TIMESTAMP).field("type", "date").endObject();
mapping.startObject(FIELD_TIMESTAMP).field("type", "date");
if (settings.get(FieldMapper.IGNORE_MALFORMED_SETTING.getKey()).equals("true")) {
mapping.field("ignore_malformed", false);
}
mapping.endObject();
// Dimensions
mapping.startObject(FIELD_DIMENSION_1).field("type", "keyword").field("time_series_dimension", true).endObject();

View file

@ -15,6 +15,8 @@ import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.core.type.EsField;
import java.util.regex.Pattern;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
import static org.elasticsearch.test.ESTestCase.randomBoolean;
@ -26,6 +28,8 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER;
public final class TestUtils {
private TestUtils() {}
private static final Pattern WS_PATTERN = Pattern.compile("\\s");
public static Literal of(Object value) {
return of(Source.EMPTY, value);
}
@ -59,4 +63,9 @@ public final class TestUtils {
public static FieldAttribute getFieldAttribute(String name, DataType dataType) {
return new FieldAttribute(EMPTY, name, new EsField(name + "f", dataType, emptyMap(), true));
}
/** Similar to {@link String#strip()}, but removes whitespace throughout the entire string, not just at its ends. */
public static String stripThrough(String input) {
return WS_PATTERN.matcher(input).replaceAll(StringUtils.EMPTY);
}
}
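A quick usage sketch for the new helper (input is illustrative):

    String normalized = stripThrough("FROM idx\n| WHERE x > 1");
    // normalized == "FROMidx|WHEREx>1" - every \s match is removed, not just leading/trailing whitespace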

View file

@ -130,8 +130,20 @@ class ValuesBytesRefAggregator {
private final BytesRefHash bytes;
private GroupingState(BigArrays bigArrays) {
values = new LongLongHash(1, bigArrays);
bytes = new BytesRefHash(1, bigArrays);
LongLongHash _values = null;
BytesRefHash _bytes = null;
try {
_values = new LongLongHash(1, bigArrays);
_bytes = new BytesRefHash(1, bigArrays);
values = _values;
bytes = _bytes;
_values = null;
_bytes = null;
} finally {
Releasables.closeExpectNoException(_values, _bytes);
}
}
void toIntermediate(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) {
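The constructor above (and the matching template change below) uses a leak-safe two-phase allocation idiom: if the second allocation trips the circuit breaker, the finally block releases the first instead of leaking it. Schematically, with illustrative local names:

    LongLongHash a = null;
    BytesRefHash b = null;
    try {
        a = new LongLongHash(1, bigArrays);   // may succeed...
        b = new BytesRefHash(1, bigArrays);   // ...then this may throw, e.g. CircuitBreakingException
        values = a;
        bytes = b;
        a = null;                             // ownership transferred to the final fields
        b = null;
    } finally {
        Releasables.closeExpectNoException(a, b); // no-op on success, frees the survivor on failure
    }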

View file

@ -244,8 +244,20 @@ $endif$
$if(long||double)$
values = new LongLongHash(1, bigArrays);
$elseif(BytesRef)$
values = new LongLongHash(1, bigArrays);
bytes = new BytesRefHash(1, bigArrays);
LongLongHash _values = null;
BytesRefHash _bytes = null;
try {
_values = new LongLongHash(1, bigArrays);
_bytes = new BytesRefHash(1, bigArrays);
values = _values;
bytes = _bytes;
_values = null;
_bytes = null;
} finally {
Releasables.closeExpectNoException(_values, _bytes);
}
$elseif(int||float)$
values = new LongHash(1, bigArrays);
$endif$

View file

@ -0,0 +1,55 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.compute.aggregation;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BlockUtils;
import org.elasticsearch.compute.operator.SequenceBytesRefBlockSourceOperator;
import org.elasticsearch.compute.operator.SourceOperator;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.hamcrest.Matchers.containsInAnyOrder;
public class ValuesBytesRefAggregatorFunctionTests extends AggregatorFunctionTestCase {
@Override
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
return new SequenceBytesRefBlockSourceOperator(
blockFactory,
IntStream.range(0, size).mapToObj(l -> new BytesRef(randomAlphaOfLengthBetween(0, 100)))
);
}
@Override
protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels);
}
@Override
protected String expectedDescriptionOfAggregator() {
return "values of bytes";
}
@Override
public void assertSimpleOutput(List<Block> input, Block result) {
TreeSet<?> set = new TreeSet<>((List<?>) BlockUtils.toJavaObject(result, 0));
Object[] values = input.stream()
.flatMap(AggregatorFunctionTestCase::allBytesRefs)
.collect(Collectors.toSet())
.toArray(Object[]::new);
if (false == set.containsAll(Arrays.asList(values))) {
assertThat(set, containsInAnyOrder(values));
}
}
}

View file

@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.compute.aggregation;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BlockUtils;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.LongBytesRefTupleBlockSourceOperator;
import org.elasticsearch.compute.operator.SourceOperator;
import org.elasticsearch.core.Tuple;
import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class ValuesBytesRefGroupingAggregatorFunctionTests extends GroupingAggregatorFunctionTestCase {
@Override
protected AggregatorFunctionSupplier aggregatorFunction(List<Integer> inputChannels) {
return new ValuesBytesRefAggregatorFunctionSupplier(inputChannels);
}
@Override
protected String expectedDescriptionOfAggregator() {
return "values of bytes";
}
@Override
protected SourceOperator simpleInput(BlockFactory blockFactory, int size) {
return new LongBytesRefTupleBlockSourceOperator(
blockFactory,
IntStream.range(0, size).mapToObj(l -> Tuple.tuple(randomLongBetween(0, 4), new BytesRef(randomAlphaOfLengthBetween(0, 100))))
);
}
@Override
public void assertSimpleGroup(List<Page> input, Block result, int position, Long group) {
Object[] values = input.stream().flatMap(p -> allBytesRefs(p, group)).collect(Collectors.toSet()).toArray(Object[]::new);
Object resultValue = BlockUtils.toJavaObject(result, position);
switch (values.length) {
case 0 -> assertThat(resultValue, nullValue());
case 1 -> assertThat(resultValue, equalTo(values[0]));
default -> {
TreeSet<?> set = new TreeSet<>((List<?>) resultValue);
if (false == set.containsAll(Arrays.asList(values))) {
assertThat(set, containsInAnyOrder(values));
}
}
}
}
}

View file

@ -143,6 +143,7 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase {
DriverContext driverContext = crankyDriverContext();
Exception exception = null;
boolean driverStarted = false;
try {
Operator operator = simple().get(driverContext);
@ -150,8 +151,8 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase {
drive(operator, input.iterator(), driverContext);
// Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws
} catch (CircuitBreakingException e) {
logger.info("broken", e);
assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
exception = e;
}
if (driverStarted == false) {
// if drive hasn't even started then we need to release the input pages
@ -159,7 +160,14 @@ public abstract class OperatorTestCase extends AnyOperatorTestCase {
}
// Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers.
assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L));
long inputUsedBytes = inputFactoryContext.breaker().getUsed();
if (inputUsedBytes != 0L) {
fail(exception, "Expected no used bytes for input, found: " + inputUsedBytes);
}
long driverUsedBytes = driverContext.breaker().getUsed();
if (driverUsedBytes != 0L) {
fail(exception, "Expected no used bytes for driver, found: " + driverUsedBytes);
}
}
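The restructuring above keeps the root cause visible: rather than a bare assertThat that would swallow the CircuitBreakingException, each breaker-leak check now reports it via the fail(Throwable, String) overload (also used by the ESRestTestCase change in this commit), e.g.:

    if (driverUsedBytes != 0L) {
        // the cranky breaker's exception, when present, becomes the cause of the test failure
        fail(exception, "Expected no used bytes for driver, found: " + driverUsedBytes);
    }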
/**

View file

@ -30,6 +30,7 @@ import org.junit.Before;
import org.junit.ClassRule;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
@ -87,9 +88,11 @@ public class EsqlSecurityIT extends ESRestTestCase {
@Before
public void indexDocuments() throws IOException {
Settings lookupSettings = Settings.builder().put("index.mode", "lookup").build();
String mapping = """
"properties":{"value": {"type": "double"}, "org": {"type": "keyword"}}
""";
createIndex("index", Settings.EMPTY, mapping);
indexDocument("index", 1, 10.0, "sales");
indexDocument("index", 2, 20.0, "engineering");
@ -110,6 +113,16 @@ public class EsqlSecurityIT extends ESRestTestCase {
indexDocument("indexpartial", 2, 40.0, "sales");
refresh("indexpartial");
createIndex("lookup-user1", lookupSettings, mapping);
indexDocument("lookup-user1", 1, 12.0, "engineering");
indexDocument("lookup-user1", 2, 31.0, "sales");
refresh("lookup-user1");
createIndex("lookup-user2", lookupSettings, mapping);
indexDocument("lookup-user2", 1, 32.0, "marketing");
indexDocument("lookup-user2", 2, 40.0, "sales");
refresh("lookup-user2");
if (aliasExists("second-alias") == false) {
Request aliasRequest = new Request("POST", "_aliases");
aliasRequest.setJsonEntity("""
@ -126,6 +139,17 @@ public class EsqlSecurityIT extends ESRestTestCase {
}
}
},
{
"add": {
"alias": "lookup-first-alias",
"index": "lookup-user1",
"filter": {
"term": {
"org": "sales"
}
}
}
},
{
"add": {
"alias": "second-alias",
@ -229,22 +253,30 @@ public class EsqlSecurityIT extends ESRestTestCase {
public void testUnauthorizedIndices() throws IOException {
ResponseException error;
error = expectThrows(ResponseException.class, () -> runESQLCommand("user1", "from index-user2 | stats sum(value)"));
assertThat(error.getMessage(), containsString("Unknown index [index-user2]"));
assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
error = expectThrows(ResponseException.class, () -> runESQLCommand("user2", "from index-user1 | stats sum(value)"));
assertThat(error.getMessage(), containsString("Unknown index [index-user1]"));
assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
error = expectThrows(ResponseException.class, () -> runESQLCommand("alias_user2", "from index-user2 | stats sum(value)"));
assertThat(error.getMessage(), containsString("Unknown index [index-user2]"));
assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
error = expectThrows(ResponseException.class, () -> runESQLCommand("metadata1_read2", "from index-user1 | stats sum(value)"));
assertThat(error.getMessage(), containsString("Unknown index [index-user1]"));
assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(400));
}
public void testInsufficientPrivilege() {
Exception error = expectThrows(Exception.class, () -> runESQLCommand("metadata1_read2", "FROM index-user1 | STATS sum=sum(value)"));
ResponseException error = expectThrows(
ResponseException.class,
() -> runESQLCommand("metadata1_read2", "FROM index-user1 | STATS sum=sum(value)")
);
logger.info("error", error);
assertThat(error.getMessage(), containsString("Unknown index [index-user1]"));
assertThat(error.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST));
}
public void testIndexPatternErrorMessageComparison_ESQL_SearchDSL() throws Exception {
@ -511,6 +543,63 @@ public class EsqlSecurityIT extends ESRestTestCase {
}
}
public void testLookupJoinIndexAllowed() throws Exception {
Response resp = runESQLCommand(
"metadata1_read2",
"ROW x = 40.0 | EVAL value = x | LOOKUP JOIN `lookup-user2` ON value | KEEP x, org"
);
assertOK(resp);
Map<String, Object> respMap = entityAsMap(resp);
assertThat(
respMap.get("columns"),
equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword")))
);
assertThat(respMap.get("values"), equalTo(List.of(List.of(40.0, "sales"))));
// Alias, should find the index and the row
resp = runESQLCommand("alias_user1", "ROW x = 31.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org");
assertOK(resp);
respMap = entityAsMap(resp);
assertThat(
respMap.get("columns"),
equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword")))
);
assertThat(respMap.get("values"), equalTo(List.of(List.of(31.0, "sales"))));
// Alias, for a row that's filtered out
resp = runESQLCommand("alias_user1", "ROW x = 123.0 | EVAL value = x | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x, org");
assertOK(resp);
respMap = entityAsMap(resp);
assertThat(
respMap.get("columns"),
equalTo(List.of(Map.of("name", "x", "type", "double"), Map.of("name", "org", "type", "keyword")))
);
assertThat(respMap.get("values"), equalTo(List.of(Arrays.asList(123.0, null))));
}
public void testLookupJoinIndexForbidden() {
var resp = expectThrows(
ResponseException.class,
() -> runESQLCommand("metadata1_read2", "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-user1` ON value | KEEP x")
);
assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]"));
assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST));
resp = expectThrows(
ResponseException.class,
() -> runESQLCommand("metadata1_read2", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x")
);
assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]"));
assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST));
resp = expectThrows(
ResponseException.class,
() -> runESQLCommand("alias_user1", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x")
);
assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]"));
assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST));
}
private void createEnrichPolicy() throws Exception {
createIndex("songs", Settings.EMPTY, """
"properties":{"song_id": {"type": "keyword"}, "title": {"type": "keyword"}, "artist": {"type": "keyword"} }

View file

@ -35,15 +35,15 @@ user2:
metadata1_read2:
cluster: []
indices:
- names: [ 'index-user1' ]
- names: [ 'index-user1', 'lookup-user1' ]
privileges: [ 'view_index_metadata' ]
- names: [ 'index-user2' ]
- names: [ 'index-user2', 'lookup-user2' ]
privileges: [ 'read' ]
alias_user1:
cluster: []
indices:
- names: [ 'first-alias' ]
- names: [ 'first-alias', 'lookup-first-alias' ]
privileges:
- read

View file

@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.List;
import static org.elasticsearch.xpack.esql.CsvTestUtils.isEnabled;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V9;
import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.ASYNC;
public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase {
@ -96,7 +96,7 @@ public class MixedClusterEsqlSpecIT extends EsqlSpecTestCase {
@Override
protected boolean supportsIndexModeLookup() throws IOException {
return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName()));
return hasCapabilities(List.of(JOIN_LOOKUP_V9.capabilityName()));
}
@Override

View file

@ -48,7 +48,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.ENRICH_SOURCE_INDI
import static org.elasticsearch.xpack.esql.EsqlTestUtils.classpathResources;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.INLINESTATS_V2;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V8;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_LOOKUP_V9;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.JOIN_PLANNING_V1;
import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.Cap.METADATA_FIELDS_REMOTE_TEST;
import static org.elasticsearch.xpack.esql.qa.rest.EsqlSpecTestCase.Mode.SYNC;
@ -124,7 +124,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase {
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS.capabilityName()));
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(INLINESTATS_V2.capabilityName()));
assumeFalse("INLINESTATS not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_PLANNING_V1.capabilityName()));
assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V8.capabilityName()));
assumeFalse("LOOKUP JOIN not yet supported in CCS", testCase.requiredCapabilities.contains(JOIN_LOOKUP_V9.capabilityName()));
}
private TestFeatureService remoteFeaturesService() throws IOException {
@ -283,8 +283,8 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase {
@Override
protected boolean supportsIndexModeLookup() throws IOException {
// CCS does not yet support JOIN_LOOKUP_V8 and clusters falsely report they have this capability
// return hasCapabilities(List.of(JOIN_LOOKUP_V8.capabilityName()));
// CCS does not yet support JOIN_LOOKUP_V9 and clusters falsely report they have this capability
// return hasCapabilities(List.of(JOIN_LOOKUP_V9.capabilityName()));
return false;
}
}

View file

@ -221,7 +221,7 @@ public abstract class RequestIndexFilteringTestCase extends ESRestTestCase {
assertThat(e.getMessage(), containsString("index_not_found_exception"));
assertThat(e.getMessage(), containsString("no such index [foo]"));
if (EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled()) {
if (EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled()) {
e = expectThrows(
ResponseException.class,
() -> runEsql(timestampFilter("gte", "2020-01-01").query("FROM test1 | LOOKUP JOIN foo ON id1"))

View file

@ -497,6 +497,7 @@ null
evalDateParseWithTimezone
required_capability: date_parse_tz
row s = "12/Jul/2022:10:24:10 +0900" | eval d = date_parse("dd/MMM/yyyy:HH:mm:ss Z", s);
warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.*
s:keyword | d:datetime
12/Jul/2022:10:24:10 +0900 | 2022-07-12T01:24:10.000Z
@ -505,6 +506,7 @@ s:keyword | d:datetime
evalDateParseWithTimezoneCrossingDayBoundary
required_capability: date_parse_tz
row s = "12/Jul/2022:08:24:10 +0900" | eval d = date_parse("dd/MMM/yyyy:HH:mm:ss Z", s);
warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.*
s:keyword | d:datetime
12/Jul/2022:08:24:10 +0900 | 2022-07-11T23:24:10.000Z
@ -517,6 +519,8 @@ row s1 = "12/Jul/2022:10:24:10 +0900", s2 = "2022/12/07 09:24:10 +0800"
| eval eq = d1 == d2
| keep d1, eq
;
warningRegex:Date format \[dd/MMM/yyyy:HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.*
warningRegex:Date format \[yyyy/dd/MM HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.*
d1:datetime | eq:boolean
2022-07-12T01:24:10.000Z | true
@ -529,6 +533,7 @@ row s = "2022/12/07 09:24:10", format="yyyy/dd/MM HH:mm:ss"
| eval with_tz = date_parse(concat(format, " Z"), concat(s, " +0900"))
| keep s, no_tz, with_tz
;
warningRegex:Date format \[yyyy/dd/MM HH:mm:ss Z\] contains textual field specifiers that could change in JDK 23.*
s:keyword | no_tz:datetime | with_tz:datetime
2022/12/07 09:24:10 | 2022-07-12T09:24:10.000Z | 2022-07-12T00:24:10.000Z
@ -543,6 +548,7 @@ row s = "2022/12/07 09:24:10", format="yyyy/dd/MM HH:mm:ss"
| eval with_tz4 = date_parse(concat(format, " O"), concat(s, " GMT+9"))
| keep s, with_tz*
;
warningRegex:Date format \[yyyy/dd/MM HH:mm:ss .\] contains textual field specifiers that could change in JDK 23.*
s:keyword | with_tz1:datetime | with_tz2:datetime | with_tz3:datetime | with_tz4:datetime
2022/12/07 09:24:10 | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z | 2022-07-12T00:24:10.000Z

View file

@ -8,7 +8,7 @@
###############################################
basicOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = languages
@ -25,7 +25,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
basicRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language_code = 1
| LOOKUP JOIN languages_lookup ON language_code
@ -36,7 +36,7 @@ language_code:integer | language_name:keyword
;
basicOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -53,7 +53,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
subsequentEvalOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = languages
@ -71,7 +71,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x
;
subsequentEvalOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -89,7 +89,7 @@ emp_no:integer | language_code:integer | language_name:keyword | language_code_x
;
sortEvalBeforeLookup
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -106,7 +106,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
nonUniqueLeftKeyOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| WHERE emp_no <= 10030
@ -130,7 +130,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
nonUniqueRightKeyOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = emp_no % 10
@ -150,7 +150,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k
;
nonUniqueRightKeyOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -170,7 +170,7 @@ emp_no:integer | language_code:integer | language_name:keyword | country:k
;
nonUniqueRightKeyFromRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language_code = 2
| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
@ -183,8 +183,7 @@ language_code:integer | language_name:keyword | country:keyword
;
repeatedIndexOnFrom
required_capability: join_lookup_v8
required_capability: join_lookup_repeated_index_from
required_capability: join_lookup_v9
FROM languages_lookup
| LOOKUP JOIN languages_lookup ON language_code
@ -202,7 +201,7 @@ dropAllLookedUpFieldsOnTheDataNode-Ignore
// Depends on
// https://github.com/elastic/elasticsearch/issues/118778
// https://github.com/elastic/elasticsearch/issues/118781
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = emp_no % 10
@ -223,7 +222,7 @@ dropAllLookedUpFieldsOnTheCoordinator-Ignore
// Depends on
// https://github.com/elastic/elasticsearch/issues/118778
// https://github.com/elastic/elasticsearch/issues/118781
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -248,7 +247,7 @@ emp_no:integer
###############################################
filterOnLeftSide
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = languages
@ -265,7 +264,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnRightSide
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -281,7 +280,7 @@ FROM sample_data
;
filterOnRightSideAfterStats
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -294,7 +293,7 @@ count:long | type:keyword
;
filterOnJoinKey
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = languages
@ -309,7 +308,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnJoinKeyAndRightSide
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| WHERE emp_no < 10006
@ -326,7 +325,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnRightSideOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -342,7 +341,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnJoinKeyOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -358,7 +357,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnJoinKeyAndRightSideOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| SORT emp_no
@ -375,7 +374,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
filterOnTheDataNodeThenFilterOnTheCoordinator
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| EVAL language_code = languages
@ -396,7 +395,7 @@ emp_no:integer | language_code:integer | language_name:keyword
###########################################################################
nullJoinKeyOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| WHERE emp_no < 10004
@ -413,7 +412,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
mvJoinKeyOnTheDataNode
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| WHERE 10003 < emp_no AND emp_no < 10008
@ -431,7 +430,7 @@ emp_no:integer | language_code:integer | language_name:keyword
;
mvJoinKeyFromRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language_code = [4, 5, 6, 7]
| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
@ -444,7 +443,7 @@ language_code:integer | language_name:keyword | country:keyword
;
mvJoinKeyFromRowExpanded
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language_code = [4, 5, 6, 7, 8]
| MV_EXPAND language_code
@ -466,7 +465,7 @@ language_code:integer | language_name:keyword | country:keyword
###########################################################################
joinOnNestedField
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM employees
| WHERE 10000 < emp_no AND emp_no < 10006
@ -486,7 +485,7 @@ emp_no:integer | language.id:integer | language.name:text
joinOnNestedFieldRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language.code = "EN"
| LOOKUP JOIN languages_nested_fields ON language.code
@ -499,7 +498,7 @@ language.id:integer | language.code:keyword | language.name.keyword:keyword
joinOnNestedNestedFieldRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW language.name.keyword = "English"
| LOOKUP JOIN languages_nested_fields ON language.name.keyword
@ -515,7 +514,7 @@ language.id:integer | language.name:text | language.name.keyword:keyword
###############################################
lookupIPFromRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -526,7 +525,7 @@ left | 172.21.0.5 | right | Development
;
lookupIPFromKeepRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", right = "right"
| KEEP left, client_ip, right
@ -538,7 +537,7 @@ left | 172.21.0.5 | right | Development
;
lookupIPFromRowWithShadowing
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -549,7 +548,7 @@ left | 172.21.0.5 | right | Development
;
lookupIPFromRowWithShadowingKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -562,7 +561,7 @@ left | 172.21.0.5 | right | Development
;
lookupIPFromRowWithShadowingKeepReordered
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -575,7 +574,7 @@ right | Development | 172.21.0.5
;
lookupIPFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -594,7 +593,7 @@ ignoreOrder:true
;
lookupIPFromIndexKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -614,7 +613,7 @@ ignoreOrder:true
;
lookupIPFromIndexKeepKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| KEEP client_ip, event_duration, @timestamp, message
@ -636,7 +635,7 @@ timestamp:date | client_ip:keyword | event_duration:long | msg:keyword
;
lookupIPFromIndexStats
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -652,7 +651,7 @@ count:long | env:keyword
;
lookupIPFromIndexStatsKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -669,7 +668,7 @@ count:long | env:keyword
;
statsAndLookupIPFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -690,7 +689,7 @@ count:long | client_ip:keyword | env:keyword
###############################################
lookupMessageFromRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", message = "Connected to 10.1.0.1", right = "right"
| LOOKUP JOIN message_types_lookup ON message
@ -701,7 +700,7 @@ left | Connected to 10.1.0.1 | right | Success
;
lookupMessageFromKeepRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", message = "Connected to 10.1.0.1", right = "right"
| KEEP left, message, right
@ -713,7 +712,7 @@ left | Connected to 10.1.0.1 | right | Success
;
lookupMessageFromRowWithShadowing
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right"
| LOOKUP JOIN message_types_lookup ON message
@ -724,7 +723,7 @@ left | Connected to 10.1.0.1 | right | Success
;
lookupMessageFromRowWithShadowingKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", message = "Connected to 10.1.0.1", type = "unknown", right = "right"
| LOOKUP JOIN message_types_lookup ON message
@ -736,7 +735,7 @@ left | Connected to 10.1.0.1 | right | Success
;
lookupMessageFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -754,7 +753,7 @@ ignoreOrder:true
;
lookupMessageFromIndexKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -773,7 +772,7 @@ ignoreOrder:true
;
lookupMessageFromIndexKeepKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| KEEP client_ip, event_duration, @timestamp, message
@ -793,7 +792,7 @@ ignoreOrder:true
;
lookupMessageFromIndexKeepReordered
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -812,7 +811,7 @@ Success | 172.21.2.162 | 3450233 | Connected to 10.1.0.3
;
lookupMessageFromIndexStats
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -827,7 +826,7 @@ count:long | type:keyword
;
lookupMessageFromIndexStatsKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -843,7 +842,7 @@ count:long | type:keyword
;
statsAndLookupMessageFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| STATS count = count(message) BY message
@ -861,7 +860,7 @@ count:long | type:keyword | message:keyword
;
lookupMessageFromIndexTwice
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -883,7 +882,7 @@ ignoreOrder:true
;
lookupMessageFromIndexTwiceKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -906,7 +905,7 @@ ignoreOrder:true
;
lookupMessageFromIndexTwiceFullyShadowing
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| LOOKUP JOIN message_types_lookup ON message
@ -930,7 +929,7 @@ ignoreOrder:true
###############################################
lookupIPAndMessageFromRow
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -942,7 +941,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowKeepBefore
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right"
| KEEP left, client_ip, message, right
@ -955,7 +954,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowKeepBetween
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -968,7 +967,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowKeepAfter
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -981,7 +980,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowWithShadowing
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", type = "type", right = "right"
| LOOKUP JOIN clientips_lookup ON client_ip
@ -993,7 +992,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowWithShadowingKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -1007,7 +1006,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowWithShadowingKeepKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -1022,7 +1021,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowWithShadowingKeepKeepKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -1038,7 +1037,7 @@ left | 172.21.0.5 | Connected to 10.1.0.1 | right | Devel
;
lookupIPAndMessageFromRowWithShadowingKeepReordered
required_capability: join_lookup_v8
required_capability: join_lookup_v9
ROW left = "left", client_ip = "172.21.0.5", message = "Connected to 10.1.0.1", env = "env", right = "right"
| EVAL client_ip = client_ip::keyword
@ -1052,7 +1051,7 @@ right | Development | Success | 172.21.0.5
;
lookupIPAndMessageFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1072,7 +1071,7 @@ ignoreOrder:true
;
lookupIPAndMessageFromIndexKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1093,7 +1092,7 @@ ignoreOrder:true
;
lookupIPAndMessageFromIndexStats
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1111,7 +1110,7 @@ count:long | env:keyword | type:keyword
;
lookupIPAndMessageFromIndexStatsKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1130,7 +1129,7 @@ count:long | env:keyword | type:keyword
;
statsAndLookupIPAndMessageFromIndex
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1149,7 +1148,7 @@ count:long | client_ip:keyword | message:keyword | env:keyword | type:keyw
;
lookupIPAndMessageFromIndexChainedEvalKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
@ -1171,7 +1170,7 @@ ignoreOrder:true
;
lookupIPAndMessageFromIndexChainedRenameKeep
required_capability: join_lookup_v8
required_capability: join_lookup_v9
FROM sample_data
| EVAL client_ip = client_ip::keyword
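All of the `join_lookup_v8` -> `join_lookup_v9` bumps above follow from the capability rename in EsqlCapabilities further down in this diff: a csv-spec test only runs when every capability it declares under `required_capability` is enabled, so retiring JOIN_LOOKUP_V8 forces every LOOKUP JOIN spec onto the new v9 capability in the same commit.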

View file

@ -1231,6 +1231,189 @@ a:keyword | upper:keyword | lower:keyword
π/2 + a + B + Λ ºC | Π/2 + A + B + Λ ºC | π/2 + a + b + λ ºc
;
equalsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| where to_upper(first_name) == "GEORGI"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
10001 | Georgi
;
equalsToUpperNestedPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| where to_upper(to_upper(to_lower(first_name))) == "GEORGI"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
10001 | Georgi
;
negatedEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| sort emp_no
| where not(to_upper(first_name) == "GEORGI")
| keep emp_no, first_name
| limit 1
;
emp_no:integer | first_name:keyword
10002 | Bezalel
;
notEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| sort emp_no
| where to_upper(first_name) != "GEORGI"
| keep emp_no, first_name
| limit 1
;
emp_no:integer | first_name:keyword
10002 | Bezalel
;
negatedNotEqualsToUpperPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| sort emp_no
| where not(to_upper(first_name) != "GEORGI")
| keep emp_no, first_name
| limit 1
;
emp_no:integer | first_name:keyword
10001 | Georgi
;
equalsToUpperFolded
from employees
| where to_upper(first_name) == "Georgi"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
negatedEqualsToUpperFolded
from employees
| where not(to_upper(first_name) == "Georgi")
| stats c = count()
;
c:long
90
;
equalsToUpperNullFolded#[skip:-8.16.99, reason:function's type corrected in #114334]
from employees
| where to_upper(null) == "Georgi"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
equalsNullToUpperFolded
from employees
| where to_upper(first_name) == null::keyword
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
notEqualsToUpperNullFolded#[skip:-8.16.99, reason:function's type corrected in #114334]
from employees
| where to_upper(null) != "Georgi"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
notEqualsNullToUpperFolded
from employees
| where to_upper(first_name) != null::keyword
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
notEqualsToUpperFolded
from employees
| where to_upper(first_name) != "Georgi"
| stats c = count()
;
c:long
90
;
negatedNotEqualsToUpperFolded
from employees
| where not(to_upper(first_name) != "Georgi")
| stats c = count()
;
c:long
0
;
equalsToLowerPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| where to_lower(first_name) == "georgi"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
10001 | Georgi
;
notEqualsToLowerPushedDown#[skip:-8.12.99, reason:case insensitive operators implemented in v 8.13]
from employees
| sort emp_no
| where to_lower(first_name) != "georgi"
| keep emp_no, first_name
| limit 1
;
emp_no:integer | first_name:keyword
10002 | Bezalel
;
equalsToLowerFolded
from employees
| where to_lower(first_name) == "Georgi"
| keep emp_no, first_name
;
emp_no:integer | first_name:keyword
;
notEqualsToLowerFolded
from employees
| where to_lower(first_name) != "Georgi"
| stats c = count()
;
c:long
90
;
equalsToLowerWithUnico(rn|d)s
from employees
| where to_lower(concat(first_name, "🦄🦄")) != "georgi🦄🦄"
| stats c = count()
;
// 10 null first names
c:long
89
;
reverse
required_capability: fn_reverse
from employees | sort emp_no | eval name_reversed = REVERSE(first_name) | keep emp_no, first_name, name_reversed | limit 1;

View file

@ -20,25 +20,28 @@ import org.elasticsearch.core.Releasables;
import org.elasticsearch.xpack.esql.core.tree.Source;
/**
* {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToLower}.
* {@link EvalOperator.ExpressionEvaluator} implementation for {@link ChangeCase}.
* This class is generated. Do not edit it.
*/
public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator {
public final class ChangeCaseEvaluator implements EvalOperator.ExpressionEvaluator {
private final Source source;
private final EvalOperator.ExpressionEvaluator val;
private final Locale locale;
private final ChangeCase.Case caseType;
private final DriverContext driverContext;
private Warnings warnings;
public ToLowerEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale,
DriverContext driverContext) {
public ChangeCaseEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale,
ChangeCase.Case caseType, DriverContext driverContext) {
this.source = source;
this.val = val;
this.locale = locale;
this.caseType = caseType;
this.driverContext = driverContext;
}
@ -68,7 +71,7 @@ public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator
result.appendNull();
continue position;
}
result.appendBytesRef(ToLower.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.locale));
result.appendBytesRef(ChangeCase.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.locale, this.caseType));
}
return result.build();
}
@ -78,7 +81,7 @@ public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator
try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) {
BytesRef valScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
result.appendBytesRef(ToLower.process(valVector.getBytesRef(p, valScratch), this.locale));
result.appendBytesRef(ChangeCase.process(valVector.getBytesRef(p, valScratch), this.locale, this.caseType));
}
return result.build();
}
@ -86,7 +89,7 @@ public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator
@Override
public String toString() {
return "ToLowerEvaluator[" + "val=" + val + ", locale=" + locale + "]";
return "ChangeCaseEvaluator[" + "val=" + val + ", locale=" + locale + ", caseType=" + caseType + "]";
}
@Override
@ -113,20 +116,24 @@ public final class ToLowerEvaluator implements EvalOperator.ExpressionEvaluator
private final Locale locale;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale) {
private final ChangeCase.Case caseType;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale,
ChangeCase.Case caseType) {
this.source = source;
this.val = val;
this.locale = locale;
this.caseType = caseType;
}
@Override
public ToLowerEvaluator get(DriverContext context) {
return new ToLowerEvaluator(source, val.get(context), locale, context);
public ChangeCaseEvaluator get(DriverContext context) {
return new ChangeCaseEvaluator(source, val.get(context), locale, caseType, context);
}
@Override
public String toString() {
return "ToLowerEvaluator[" + "val=" + val + ", locale=" + locale + "]";
return "ChangeCaseEvaluator[" + "val=" + val + ", locale=" + locale + ", caseType=" + caseType + "]";
}
}
}
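This generated class comes from the `@Evaluator`-annotated `ChangeCase.process(BytesRef, Locale, Case)` method introduced later in this diff; its two `@Fixed` parameters (`locale` and `caseType`) are what surface here as the extra constructor and `Factory` arguments, which is how a single evaluator now serves both TO_LOWER and TO_UPPER.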

View file

@ -1,132 +0,0 @@
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License
// 2.0; you may not use this file except in compliance with the Elastic License
// 2.0.
package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import java.lang.IllegalArgumentException;
import java.lang.Override;
import java.lang.String;
import java.util.Locale;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.BytesRefVector;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.DriverContext;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.compute.operator.Warnings;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.xpack.esql.core.tree.Source;
/**
* {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToUpper}.
* This class is generated. Do not edit it.
*/
public final class ToUpperEvaluator implements EvalOperator.ExpressionEvaluator {
private final Source source;
private final EvalOperator.ExpressionEvaluator val;
private final Locale locale;
private final DriverContext driverContext;
private Warnings warnings;
public ToUpperEvaluator(Source source, EvalOperator.ExpressionEvaluator val, Locale locale,
DriverContext driverContext) {
this.source = source;
this.val = val;
this.locale = locale;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (BytesRefBlock valBlock = (BytesRefBlock) val.eval(page)) {
BytesRefVector valVector = valBlock.asVector();
if (valVector == null) {
return eval(page.getPositionCount(), valBlock);
}
return eval(page.getPositionCount(), valVector).asBlock();
}
}
public BytesRefBlock eval(int positionCount, BytesRefBlock valBlock) {
try(BytesRefBlock.Builder result = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) {
BytesRef valScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
if (valBlock.isNull(p)) {
result.appendNull();
continue position;
}
if (valBlock.getValueCount(p) != 1) {
if (valBlock.getValueCount(p) > 1) {
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
}
result.appendNull();
continue position;
}
result.appendBytesRef(ToUpper.process(valBlock.getBytesRef(valBlock.getFirstValueIndex(p), valScratch), this.locale));
}
return result.build();
}
}
public BytesRefVector eval(int positionCount, BytesRefVector valVector) {
try(BytesRefVector.Builder result = driverContext.blockFactory().newBytesRefVectorBuilder(positionCount)) {
BytesRef valScratch = new BytesRef();
position: for (int p = 0; p < positionCount; p++) {
result.appendBytesRef(ToUpper.process(valVector.getBytesRef(p, valScratch), this.locale));
}
return result.build();
}
}
@Override
public String toString() {
return "ToUpperEvaluator[" + "val=" + val + ", locale=" + locale + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(val);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
private final Locale locale;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale) {
this.source = source;
this.val = val;
this.locale = locale;
}
@Override
public ToUpperEvaluator get(DriverContext context) {
return new ToUpperEvaluator(source, val.get(context), locale, context);
}
@Override
public String toString() {
return "ToUpperEvaluator[" + "val=" + val + ", locale=" + locale + "]";
}
}
}

View file

@ -560,12 +560,7 @@ public class EsqlCapabilities {
/**
* LOOKUP JOIN
*/
JOIN_LOOKUP_V8(Build.current().isSnapshot()),
/**
* LOOKUP JOIN with the same index as the FROM
*/
JOIN_LOOKUP_REPEATED_INDEX_FROM(JOIN_LOOKUP_V8.isEnabled()),
JOIN_LOOKUP_V9(Build.current().isSnapshot()),
/**
* Fix for https://github.com/elastic/elasticsearch/issues/117054
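For reference, the `required_capability:` strings in the csv-spec files above are matched against `Cap.capabilityName()` (see CsvTests below). A minimal sketch of that relationship; the lower-casing is an assumption, since `capabilityName()`'s implementation is not part of this diff:

import java.util.Locale;
import org.elasticsearch.xpack.esql.action.EsqlCapabilities;

class CapabilityNameSketch {
    public static void main(String[] args) {
        // Assumption: the spec-file name is the lower-cased enum constant name,
        // i.e. JOIN_LOOKUP_V9 <-> "join_lookup_v9".
        String name = EsqlCapabilities.Cap.JOIN_LOOKUP_V9.capabilityName();
        assert name.equals(EsqlCapabilities.Cap.JOIN_LOOKUP_V9.name().toLowerCase(Locale.ROOT));
    }
}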

View file

@ -132,7 +132,6 @@ import java.util.stream.IntStream;
*/
abstract class AbstractLookupService<R extends AbstractLookupService.Request, T extends AbstractLookupService.TransportRequest> {
private final String actionName;
private final String privilegeName;
private final ClusterService clusterService;
private final SearchService searchService;
private final TransportService transportService;
@ -143,7 +142,6 @@ abstract class AbstractLookupService<R extends AbstractLookupService.Request, T
AbstractLookupService(
String actionName,
String privilegeName,
ClusterService clusterService,
SearchService searchService,
TransportService transportService,
@ -152,7 +150,6 @@ abstract class AbstractLookupService<R extends AbstractLookupService.Request, T
CheckedBiFunction<StreamInput, BlockFactory, T, IOException> readRequest
) {
this.actionName = actionName;
this.privilegeName = privilegeName;
this.clusterService = clusterService;
this.searchService = searchService;
this.transportService = transportService;
@ -237,9 +234,21 @@ abstract class AbstractLookupService<R extends AbstractLookupService.Request, T
}));
}
/**
* Get the privilege required to perform the lookup.
* <p>
* If null is returned, no privilege check will be performed.
* </p>
*/
@Nullable
protected abstract String getRequiredPrivilege();
private void hasPrivilege(ActionListener<Void> outListener) {
final Settings settings = clusterService.getSettings();
if (settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false || XPackSettings.SECURITY_ENABLED.get(settings) == false) {
String privilegeName = getRequiredPrivilege();
if (privilegeName == null
|| settings.hasValue(XPackSettings.SECURITY_ENABLED.getKey()) == false
|| XPackSettings.SECURITY_ENABLED.get(settings) == false) {
outListener.onResponse(null);
return;
}
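The refactoring replaces the constructor-injected `privilegeName` with a `getRequiredPrivilege()` template method: a subclass returns a cluster-privilege name to have `hasPrivilege` enforce it, or null to opt out of the check entirely (the security-disabled short-circuit is unchanged). The two concrete choices follow in the next files: EnrichLookupService keeps requiring `MONITOR_ENRICH`, while LookupFromIndexService now deliberately returns null instead of borrowing the enrich privilege as it did before.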

View file

@ -52,16 +52,7 @@ public class EnrichLookupService extends AbstractLookupService<EnrichLookupServi
BigArrays bigArrays,
BlockFactory blockFactory
) {
super(
LOOKUP_ACTION_NAME,
ClusterPrivilegeResolver.MONITOR_ENRICH.name(),
clusterService,
searchService,
transportService,
bigArrays,
blockFactory,
TransportRequest::readFrom
);
super(LOOKUP_ACTION_NAME, clusterService, searchService, transportService, bigArrays, blockFactory, TransportRequest::readFrom);
}
@Override
@ -90,6 +81,11 @@ public class EnrichLookupService extends AbstractLookupService<EnrichLookupServi
};
}
@Override
protected String getRequiredPrivilege() {
return ClusterPrivilegeResolver.MONITOR_ENRICH.name();
}
private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) {
if (fieldType instanceof RangeFieldMapper.RangeFieldType rangeType) {
// For range policy types, the ENRICH index field type will be one of a list of supported range types,

View file

@ -23,7 +23,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver;
import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
import org.elasticsearch.xpack.esql.action.EsqlQueryAction;
import org.elasticsearch.xpack.esql.core.expression.NamedExpression;
@ -50,16 +49,7 @@ public class LookupFromIndexService extends AbstractLookupService<LookupFromInde
BigArrays bigArrays,
BlockFactory blockFactory
) {
super(
LOOKUP_ACTION_NAME,
ClusterPrivilegeResolver.MONITOR_ENRICH.name(), // TODO some other privilege
clusterService,
searchService,
transportService,
bigArrays,
blockFactory,
TransportRequest::readFrom
);
super(LOOKUP_ACTION_NAME, clusterService, searchService, transportService, bigArrays, blockFactory, TransportRequest::readFrom);
}
@Override
@ -83,6 +73,11 @@ public class LookupFromIndexService extends AbstractLookupService<LookupFromInde
return termQueryList(fieldType, context, inputBlock, inputDataType);
}
@Override
protected String getRequiredPrivilege() {
return null;
}
private static void validateTypes(DataType inputDataType, MappedFieldType fieldType) {
// TODO: consider supporting implicit type conversion as done in ENRICH for some types
if (fieldType.typeName().equals(inputDataType.typeName()) == false) {

View file

@ -0,0 +1,112 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;
import org.elasticsearch.compute.operator.EvalOperator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction;
import org.elasticsearch.xpack.esql.session.Configuration;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString;
public abstract class ChangeCase extends EsqlConfigurationFunction {
public enum Case {
UPPER {
@Override
String process(String value, Locale locale) {
return value.toUpperCase(locale);
}
@Override
public boolean matchesCase(String value) {
return value.codePoints().allMatch(cp -> Character.getType(cp) != Character.LOWERCASE_LETTER);
}
},
LOWER {
@Override
String process(String value, Locale locale) {
return value.toLowerCase(locale);
}
@Override
public boolean matchesCase(String value) {
return value.codePoints().allMatch(cp -> Character.getType(cp) != Character.UPPERCASE_LETTER);
}
};
abstract String process(String value, Locale locale);
public abstract boolean matchesCase(String value);
}
private final Expression field;
private final Case caseType;
protected ChangeCase(Source source, Expression field, Configuration configuration, Case caseType) {
super(source, List.of(field), configuration);
this.field = field;
this.caseType = caseType;
}
@Override
public DataType dataType() {
return DataType.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
return isString(field, sourceText(), DEFAULT);
}
@Override
public boolean foldable() {
return field.foldable();
}
public Expression field() {
return field;
}
public Case caseType() {
return caseType;
}
public abstract Expression replaceChild(Expression child);
@Override
public Expression replaceChildren(List<Expression> newChildren) {
assert newChildren.size() == 1;
return replaceChild(newChildren.get(0));
}
@Evaluator
static BytesRef process(BytesRef val, @Fixed Locale locale, @Fixed Case caseType) {
return BytesRefs.toBytesRef(caseType.process(val.utf8ToString(), locale));
}
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
var fieldEvaluator = toEvaluator.apply(field);
return new ChangeCaseEvaluator.Factory(source(), fieldEvaluator, configuration().locale(), caseType);
}
}
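The `matchesCase` predicate is what the new optimizer rule below keys on: a literal is compatible with a case iff none of its code points belongs to the opposite letter class, so digits, punctuation, and emoji are neutral. A hedged sketch (hypothetical assertions, not part of this diff) of that contract:

import org.elasticsearch.xpack.esql.expression.function.scalar.string.ChangeCase;

class MatchesCaseSketch {
    public static void main(String[] args) {
        // Digits, punctuation and emoji belong to neither letter class,
        // so they never disqualify a literal:
        assert ChangeCase.Case.UPPER.matchesCase("FOO-123🦄");
        assert ChangeCase.Case.LOWER.matchesCase("georgi🦄🦄");
        // A single opposite-case letter makes the comparison unsatisfiable,
        // which is what lets the rule below fold it to FALSE:
        assert ChangeCase.Case.UPPER.matchesCase("Foo") == false;
        assert ChangeCase.Case.LOWER.matchesCase("Georgi") == false;
    }
}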

View file

@ -7,37 +7,23 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction;
import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
import org.elasticsearch.xpack.esql.session.Configuration;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString;
public class ToLower extends EsqlConfigurationFunction {
public class ToLower extends ChangeCase {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToLower", ToLower::new);
private final Expression field;
@FunctionInfo(
returnType = { "keyword" },
description = "Returns a new string representing the input string converted to lower case.",
@ -52,8 +38,7 @@ public class ToLower extends EsqlConfigurationFunction {
) Expression field,
Configuration configuration
) {
super(source, List.of(field), configuration);
this.field = field;
super(source, field, configuration, Case.LOWER);
}
private ToLower(StreamInput in) throws IOException {
@ -70,52 +55,12 @@ public class ToLower extends EsqlConfigurationFunction {
return ENTRY.name;
}
@Override
public DataType dataType() {
return DataType.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
return isString(field, sourceText(), DEFAULT);
}
@Override
public boolean foldable() {
return field.foldable();
}
@Evaluator
static BytesRef process(BytesRef val, @Fixed Locale locale) {
return BytesRefs.toBytesRef(val.utf8ToString().toLowerCase(locale));
}
@Override
public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
var fieldEvaluator = toEvaluator.apply(field);
return new ToLowerEvaluator.Factory(source(), fieldEvaluator, configuration().locale());
}
public Expression field() {
return field;
}
public ToLower replaceChild(Expression child) {
return new ToLower(source(), child, configuration());
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
assert newChildren.size() == 1;
return replaceChild(newChildren.get(0));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, ToLower::new, field, configuration());
return NodeInfo.create(this, ToLower::new, field(), configuration());
}
}

View file

@ -7,37 +7,23 @@
package org.elasticsearch.xpack.esql.expression.function.scalar.string;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.compute.ann.Evaluator;
import org.elasticsearch.compute.ann.Fixed;
import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
import org.elasticsearch.xpack.esql.core.tree.Source;
import org.elasticsearch.xpack.esql.core.type.DataType;
import org.elasticsearch.xpack.esql.expression.function.Example;
import org.elasticsearch.xpack.esql.expression.function.FunctionInfo;
import org.elasticsearch.xpack.esql.expression.function.Param;
import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlConfigurationFunction;
import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput;
import org.elasticsearch.xpack.esql.session.Configuration;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT;
import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString;
public class ToUpper extends EsqlConfigurationFunction {
public class ToUpper extends ChangeCase {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToUpper", ToUpper::new);
private final Expression field;
@FunctionInfo(
returnType = { "keyword" },
description = "Returns a new string representing the input string converted to upper case.",
@ -52,8 +38,7 @@ public class ToUpper extends EsqlConfigurationFunction {
) Expression field,
Configuration configuration
) {
super(source, List.of(field), configuration);
this.field = field;
super(source, field, configuration, Case.UPPER);
}
private ToUpper(StreamInput in) throws IOException {
@ -62,7 +47,7 @@ public class ToUpper extends EsqlConfigurationFunction {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(field);
out.writeNamedWriteable(field());
}
@Override
@ -70,52 +55,12 @@ public class ToUpper extends EsqlConfigurationFunction {
return ENTRY.name;
}
@Override
public DataType dataType() {
return DataType.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
return isString(field, sourceText(), DEFAULT);
}
@Override
public boolean foldable() {
return field.foldable();
}
@Evaluator
static BytesRef process(BytesRef val, @Fixed Locale locale) {
return BytesRefs.toBytesRef(val.utf8ToString().toUpperCase(locale));
}
@Override
public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
var fieldEvaluator = toEvaluator.apply(field);
return new ToUpperEvaluator.Factory(source(), fieldEvaluator, configuration().locale());
}
public Expression field() {
return field;
}
public ToUpper replaceChild(Expression child) {
return new ToUpper(source(), child, configuration());
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
assert newChildren.size() == 1;
return replaceChild(newChildren.get(0));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, ToUpper::new, field, configuration());
return NodeInfo.create(this, ToUpper::new, field(), configuration());
}
}

View file

@ -49,6 +49,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceOrderByExpres
import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRegexMatch;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRowAsLocalRelation;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceStatsFilteredAggWithEval;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceStringCasingWithInsensitiveEquals;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceTrivialTypeConversions;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.SetAsOptimized;
import org.elasticsearch.xpack.esql.optimizer.rules.logical.SimplifyComparisonsArithmetics;
@ -175,6 +176,7 @@ public class LogicalPlanOptimizer extends ParameterizedRuleExecutor<LogicalPlan,
new CombineDisjunctions(),
// TODO: the BiFunction can now (since we have just one set of data types) be pushed into the rule
new SimplifyComparisonsArithmetics(DataType::areCompatible),
new ReplaceStringCasingWithInsensitiveEquals(),
new ReplaceStatsFilteredAggWithEval(),
new ExtractAggregateCommonFilter(),
// prune/elimination

View file

@ -0,0 +1,68 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.esql.optimizer.rules.logical;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.Literal;
import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison;
import org.elasticsearch.xpack.esql.expression.function.scalar.string.ChangeCase;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals;
public class ReplaceStringCasingWithInsensitiveEquals extends OptimizerRules.OptimizerExpressionRule<ScalarFunction> {
public ReplaceStringCasingWithInsensitiveEquals() {
super(OptimizerRules.TransformDirection.DOWN);
}
@Override
protected Expression rule(ScalarFunction sf) {
Expression e = sf;
if (sf instanceof BinaryComparison bc) {
e = rewriteBinaryComparison(sf, bc, false);
} else if (sf instanceof Not not && not.field() instanceof BinaryComparison bc) {
e = rewriteBinaryComparison(sf, bc, true);
}
return e;
}
private static Expression rewriteBinaryComparison(ScalarFunction sf, BinaryComparison bc, boolean negated) {
Expression e = sf;
if (bc.left() instanceof ChangeCase changeCase && bc.right().foldable()) {
if (bc instanceof Equals) {
e = replaceChangeCase(bc, changeCase, negated);
} else if (bc instanceof NotEquals) { // not actually used currently, `!=` is built as `NOT(==)` already
e = replaceChangeCase(bc, changeCase, negated == false);
}
}
return e;
}
private static Expression replaceChangeCase(BinaryComparison bc, ChangeCase changeCase, boolean negated) {
var foldedRight = BytesRefs.toString(bc.right().fold());
var field = unwrapCase(changeCase.field());
var e = changeCase.caseType().matchesCase(foldedRight)
? new InsensitiveEquals(bc.source(), field, bc.right())
: Literal.of(bc, Boolean.FALSE);
if (negated) {
e = e instanceof Literal ? new IsNotNull(e.source(), field) : new Not(e.source(), e);
}
return e;
}
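// Peels off arbitrarily nested TO_UPPER/TO_LOWER wrappers around the field:
// once the comparison is case-insensitive, intermediate case changes are no-ops
// (exercised by testReplaceStringCasingWithInsensitiveEqualsUnwrap further down).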
private static Expression unwrapCase(Expression e) {
for (; e instanceof ChangeCase cc; e = cc.field()) {
}
return e;
}
}
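Net effect of the rule, as exercised by the optimizer and physical-plan tests elsewhere in this diff: `TO_UPPER(f) == "FOO"` becomes a case-insensitive equality on `f`, which can then be pushed to Lucene as a term query with `case_insensitive: true`; `TO_UPPER(f) == "Foo"` can never match and folds to FALSE (an empty LocalRelation after further optimization); the negated forms become `NOT(insensitive-equals)` and, for the always-false case, an `IS NOT NULL` check on the field, since `!=` against a null field yields null rather than true.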

View file

@ -575,6 +575,15 @@ public class LocalExecutionPlanner {
if (localSourceExec.indexMode() != IndexMode.LOOKUP) {
throw new IllegalArgumentException("can't plan [" + join + "]");
}
Map<String, IndexMode> indicesWithModes = localSourceExec.index().indexNameWithModes();
if (indicesWithModes.size() != 1) {
throw new IllegalArgumentException("can't plan [" + join + "], found more than 1 index");
}
var entry = indicesWithModes.entrySet().iterator().next();
if (entry.getValue() != IndexMode.LOOKUP) {
throw new IllegalArgumentException("can't plan [" + join + "], found index with mode [" + entry.getValue() + "]");
}
String indexName = entry.getKey();
List<Layout.ChannelAndType> matchFields = new ArrayList<>(join.leftFields().size());
for (Attribute m : join.leftFields()) {
Layout.ChannelAndType t = source.layout.get(m.id());
@ -595,7 +604,7 @@ public class LocalExecutionPlanner {
matchFields.getFirst().channel(),
lookupFromIndexService,
matchFields.getFirst().type(),
localSourceExec.index().name(),
indexName,
join.leftFields().getFirst().name(),
join.addedFields().stream().map(f -> (NamedExpression) f).toList(),
join.source()

View file

@ -263,7 +263,7 @@ public class CsvTests extends ESTestCase {
);
assumeFalse(
"lookup join disabled for csv tests",
testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V8.capabilityName())
testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.JOIN_LOOKUP_V9.capabilityName())
);
assumeFalse(
"can't use TERM function in csv tests",

View file

@ -2140,7 +2140,7 @@ public class AnalyzerTests extends ESTestCase {
}
public void testLookupJoinUnknownIndex() {
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String errorMessage = "Unknown index [foobar]";
IndexResolution missingLookupIndex = IndexResolution.invalid(errorMessage);
@ -2169,7 +2169,7 @@ public class AnalyzerTests extends ESTestCase {
}
public void testLookupJoinUnknownField() {
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = "FROM test | LOOKUP JOIN languages_lookup ON last_name";
String errorMessage = "1:45: Unknown column [last_name] in right side of join";
@ -2192,7 +2192,7 @@ public class AnalyzerTests extends ESTestCase {
}
public void testMultipleLookupJoinsGiveDifferentAttributes() {
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
// The field attributes that get contributed by different LOOKUP JOIN commands must have different name ids,
// even if they have the same names. Otherwise, things like dependency analysis - like in PruneColumns - cannot work based on
@ -2222,7 +2222,7 @@ public class AnalyzerTests extends ESTestCase {
}
public void testLookupJoinIndexMode() {
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
var indexResolution = AnalyzerTestUtils.expandedDefaultIndexResolution();
var lookupResolution = AnalyzerTestUtils.defaultLookupResolution();

View file

@ -113,7 +113,7 @@ public class ParsingTests extends ESTestCase {
}
public void testJoinOnConstant() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertEquals(
"1:55: JOIN ON clause only supports fields at the moment, found [123]",
error("row languages = 1, gender = \"f\" | lookup join test on 123")
@ -129,7 +129,7 @@ public class ParsingTests extends ESTestCase {
}
public void testJoinOnMultipleFields() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertEquals(
"1:35: JOIN ON clause only supports one field at the moment, found [2]",
error("row languages = 1, gender = \"f\" | lookup join test on gender, languages")
@ -137,7 +137,7 @@ public class ParsingTests extends ESTestCase {
}
public void testJoinTwiceOnTheSameField() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertEquals(
"1:35: JOIN ON clause only supports one field at the moment, found [2]",
error("row languages = 1, gender = \"f\" | lookup join test on languages, languages")
@ -145,7 +145,7 @@ public class ParsingTests extends ESTestCase {
}
public void testJoinTwiceOnTheSameField_TwoLookups() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertEquals(
"1:80: JOIN ON clause only supports one field at the moment, found [2]",
error("row languages = 1, gender = \"f\" | lookup join test on languages | eval x = 1 | lookup join test on gender, gender")

View file

@ -1974,7 +1974,7 @@ public class VerifierTests extends ESTestCase {
}
public void testLookupJoinDataTypeMismatch() {
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("requires LOOKUP JOIN capability", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
query("FROM test | EVAL language_code = languages | LOOKUP JOIN languages_lookup ON language_code");

View file

@ -83,7 +83,7 @@ public class ToLowerTests extends AbstractConfigurationFunctionTestCase {
private static TestCaseSupplier supplier(String name, DataType type, Supplier<String> valueSupplier) {
return new TestCaseSupplier(name, List.of(type), () -> {
List<TestCaseSupplier.TypedData> values = new ArrayList<>();
String expectedToString = "ToLowerEvaluator[val=Attribute[channel=0], locale=en_US]";
String expectedToString = "ChangeCaseEvaluator[val=Attribute[channel=0], locale=en_US, caseType=LOWER]";
String value = valueSupplier.get();
values.add(new TestCaseSupplier.TypedData(new BytesRef(value), type, "0"));

View file

@ -83,7 +83,7 @@ public class ToUpperTests extends AbstractConfigurationFunctionTestCase {
private static TestCaseSupplier supplier(String name, DataType type, Supplier<String> valueSupplier) {
return new TestCaseSupplier(name, List.of(type), () -> {
List<TestCaseSupplier.TypedData> values = new ArrayList<>();
String expectedToString = "ToUpperEvaluator[val=Attribute[channel=0], locale=en_US]";
String expectedToString = "ChangeCaseEvaluator[val=Attribute[channel=0], locale=en_US, caseType=UPPER]";
String value = valueSupplier.get();
values.add(new TestCaseSupplier.TypedData(new BytesRef(value), type, "0"));

View file

@ -39,6 +39,7 @@ import org.elasticsearch.xpack.esql.core.expression.Nullability;
import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute;
import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison;
@ -85,6 +86,7 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equ
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals;
import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan;
import org.elasticsearch.xpack.esql.index.EsIndex;
import org.elasticsearch.xpack.esql.index.IndexResolution;
@ -126,6 +128,7 @@ import org.junit.BeforeClass;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
@ -4924,7 +4927,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
}
public void testPlanSanityCheckWithBinaryPlans() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
var plan = optimizedPlan("""
FROM test
@ -5735,6 +5738,78 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
}
}
public void testReplaceStringCasingWithInsensitiveEqualsUpperFalse() {
var plan = optimizedPlan("FROM test | WHERE TO_UPPER(first_name) == \"VALÜe\"");
var local = as(plan, LocalRelation.class);
assertThat(local.supplier(), equalTo(LocalSupplier.EMPTY));
}
public void testReplaceStringCasingWithInsensitiveEqualsUpperTrue() {
var plan = optimizedPlan("FROM test | WHERE TO_UPPER(first_name) != \"VALÜe\"");
var limit = as(plan, Limit.class);
var filter = as(limit.child(), Filter.class);
var isNotNull = as(filter.condition(), IsNotNull.class);
assertThat(Expressions.name(isNotNull.field()), is("first_name"));
as(filter.child(), EsRelation.class);
}
public void testReplaceStringCasingWithInsensitiveEqualsLowerFalse() {
var plan = optimizedPlan("FROM test | WHERE TO_LOWER(first_name) == \"VALÜe\"");
var local = as(plan, LocalRelation.class);
assertThat(local.supplier(), equalTo(LocalSupplier.EMPTY));
}
public void testReplaceStringCasingWithInsensitiveEqualsLowerTrue() {
var plan = optimizedPlan("FROM test | WHERE TO_LOWER(first_name) != \"VALÜe\"");
var limit = as(plan, Limit.class);
var filter = as(limit.child(), Filter.class);
assertThat(filter.condition(), instanceOf(IsNotNull.class));
as(filter.child(), EsRelation.class);
}
public void testReplaceStringCasingWithInsensitiveEqualsEquals() {
for (var fn : List.of("TO_LOWER", "TO_UPPER")) {
var value = fn.equals("TO_LOWER") ? fn.toLowerCase(Locale.ROOT) : fn.toUpperCase(Locale.ROOT);
value += "🐔✈🔥🎉"; // these should not cause folding, they're not in the upper/lower char class
var plan = optimizedPlan("FROM test | WHERE " + fn + "(first_name) == \"" + value + "\"");
var limit = as(plan, Limit.class);
var filter = as(limit.child(), Filter.class);
var insensitive = as(filter.condition(), InsensitiveEquals.class);
as(insensitive.left(), FieldAttribute.class);
var bRef = as(insensitive.right().fold(), BytesRef.class);
assertThat(bRef.utf8ToString(), is(value));
as(filter.child(), EsRelation.class);
}
}
public void testReplaceStringCasingWithInsensitiveEqualsNotEquals() {
for (var fn : List.of("TO_LOWER", "TO_UPPER")) {
var value = fn.equals("TO_LOWER") ? fn.toLowerCase(Locale.ROOT) : fn.toUpperCase(Locale.ROOT);
value += "🐔✈🔥🎉"; // these should not cause folding, they're not in the upper/lower char class
var plan = optimizedPlan("FROM test | WHERE " + fn + "(first_name) != \"" + value + "\"");
var limit = as(plan, Limit.class);
var filter = as(limit.child(), Filter.class);
var not = as(filter.condition(), Not.class);
var insensitive = as(not.field(), InsensitiveEquals.class);
as(insensitive.left(), FieldAttribute.class);
var bRef = as(insensitive.right().fold(), BytesRef.class);
assertThat(bRef.utf8ToString(), is(value));
as(filter.child(), EsRelation.class);
}
}
public void testReplaceStringCasingWithInsensitiveEqualsUnwrap() {
var plan = optimizedPlan("FROM test | WHERE TO_UPPER(TO_LOWER(TO_UPPER(first_name))) == \"VALÜ\"");
var limit = as(plan, Limit.class);
var filter = as(limit.child(), Filter.class);
var insensitive = as(filter.condition(), InsensitiveEquals.class);
var field = as(insensitive.left(), FieldAttribute.class);
assertThat(field.fieldName(), is("first_name"));
var bRef = as(insensitive.right().fold(), BytesRef.class);
assertThat(bRef.utf8ToString(), is("VALÜ"));
as(filter.child(), EsRelation.class);
}
@Override
protected List<String> filteredWarnings() {
return withDefaultLimitWarning(super.filteredWarnings());
@ -5928,7 +6003,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
* \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19]
*/
public void testLookupJoinPushDownFilterOnJoinKeyWithRename() {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = """
FROM test
@ -5970,7 +6045,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
* \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19]
*/
public void testLookupJoinPushDownFilterOnLeftSideField() {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = """
FROM test
@ -6013,7 +6088,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
* \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18, language_name{f}#19]
*/
public void testLookupJoinPushDownDisabledForLookupField() {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = """
FROM test
@ -6057,7 +6132,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
* \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20]
*/
public void testLookupJoinPushDownSeparatedForConjunctionBetweenLeftAndRightField() {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = """
FROM test
@ -6108,7 +6183,7 @@ public class LogicalPlanOptimizerTests extends ESTestCase {
* \_EsRelation[languages_lookup][LOOKUP][language_code{f}#19, language_name{f}#20]
*/
public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField() {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
String query = """
FROM test

View file

@ -174,6 +174,7 @@ import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_POINT;
import static org.elasticsearch.xpack.esql.core.type.DataType.CARTESIAN_SHAPE;
import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_POINT;
import static org.elasticsearch.xpack.esql.core.type.DataType.GEO_SHAPE;
import static org.elasticsearch.xpack.esql.core.util.TestUtils.stripThrough;
import static org.elasticsearch.xpack.esql.parser.ExpressionBuilder.MAX_EXPRESSION_DEPTH;
import static org.elasticsearch.xpack.esql.parser.LogicalPlanBuilder.MAX_QUERY_DEPTH;
import static org.hamcrest.Matchers.closeTo;
@ -1803,7 +1804,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES));
QueryBuilder query = source.query();
assertNotNull(query);
assertNotNull(query); // TODO: verify query
}
/**
@ -1838,7 +1839,257 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
var source = source(extract.child());
QueryBuilder query = source.query();
assertNull(query);
assertNull(query); // TODO: verify query
}
public void testPushDownEqualsToUpper() {
doTestPushDownChangeCase("""
from test
| where to_upper(first_name) == "FOO"
""", """
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"term" : {
"first_name" : {
"value" : "FOO",
"case_insensitive" : true
}
}
},
"source" : "to_upper(first_name) == \\"FOO\\"@2:9"
}
}""");
}
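The expected JSON above shows the shape of the rewrite: `to_upper(first_name) == "FOO"` becomes a case-insensitive term query on the raw field (the rewrite is only sound because the literal "FOO" already equals its own upper-cased form). A minimal sketch of the equivalent query using the standard QueryBuilders API — the `esql_single_value` wrapper in the expected output is an ESQL-internal query and is omitted here:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;

class PushDownSketch {
    // Case-insensitive term query equivalent to the pushed-down predicate above.
    static TermQueryBuilder upperEquals() {
        return QueryBuilders.termQuery("first_name", "FOO").caseInsensitive(true);
    }
}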
/*
* LimitExec[1000[INTEGER]]
* \_ExchangeExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12,
* languages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],false]
* \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12,
* languages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8]]
* \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]<[]>
 * \_EsQueryExec[test], indexMode[standard], query[{...}][_doc{f}#25], limit[1000], sort[] estimatedRowSize[332]
*/
private void doTestPushDownChangeCase(String esql, String expected) {
var plan = physicalPlan(esql);
var optimized = optimizedPlan(plan);
var topLimit = as(optimized, LimitExec.class);
var exchange = asRemoteExchange(topLimit.child());
var project = as(exchange.child(), ProjectExec.class);
var extractRest = as(project.child(), FieldExtractExec.class);
var source = source(extractRest.child());
assertThat(source.estimatedRowSize(), equalTo(allFieldRowSize + Integer.BYTES));
QueryBuilder query = source.query();
assertThat(stripThrough(query.toString()), is(stripThrough(expected)));
}
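The helper compares the rendered query against the expected JSON with whitespace stripped, so the expected blocks can stay pretty-printed. The implementation of `TestUtils.stripThrough` is not part of this diff; a plausible sketch, assuming it simply removes whitespace:

// Plausible behavior of TestUtils.stripThrough (assumption: it only removes
// whitespace, so the pretty-printed expected JSON compares equal to the
// compact rendering of the actual query).
static String stripThrough(String json) {
    return json.replaceAll("\\s+", "");
}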
public void testPushDownEqualsToLower() {
doTestPushDownChangeCase("""
from test
| where to_lower(first_name) == "foo"
""", """
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"term" : {
"first_name" : {
"value" : "foo",
"case_insensitive" : true
}
}
},
"source" : "to_lower(first_name) == \\"foo\\"@2:9"
}
}""");
}
public void testPushDownNotEqualsToUpper() {
doTestPushDownChangeCase("""
from test
| where to_upper(first_name) != "FOO"
""", """
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"bool" : {
"must_not" : [
{
"term" : {
"first_name" : {
"value" : "FOO",
"case_insensitive" : true
}
}
}
],
"boost" : 1.0
}
},
"source" : "to_upper(first_name) != \\"FOO\\"@2:9"
}
}""");
}
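The negated form wraps the same case-insensitive term query in a `bool`/`must_not`. A sketch of the equivalent builder calls, again omitting the ESQL-internal `esql_single_value` wrapper:

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class NegatedPushDownSketch {
    // bool/must_not around the case-insensitive term, as in the expected JSON.
    static BoolQueryBuilder upperNotEquals() {
        return QueryBuilders.boolQuery()
            .mustNot(QueryBuilders.termQuery("first_name", "FOO").caseInsensitive(true));
    }
}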
public void testPushDownNotEqualsToLower() {
doTestPushDownChangeCase("""
from test
| where to_lower(first_name) != "foo"
""", """
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"bool" : {
"must_not" : [
{
"term" : {
"first_name" : {
"value" : "foo",
"case_insensitive" : true
}
}
}
],
"boost" : 1.0
}
},
"source" : "to_lower(first_name) != \\"foo\\"@2:9"
}
}""");
}
public void testPushDownChangeCaseMultiplePredicates() {
doTestPushDownChangeCase("""
from test
| where to_lower(first_name) != "foo" or to_upper(first_name) == "FOO" or emp_no > 10
""", """
{
"bool" : {
"should" : [
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"bool" : {
"must_not" : [
{
"term" : {
"first_name" : {
"value" : "foo",
"case_insensitive" : true
}
}
}
],
"boost" : 1.0
}
},
"source" : "to_lower(first_name) != \\"foo\\"@2:9"
}
},
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"term" : {
"first_name" : {
"value" : "FOO",
"case_insensitive" : true
}
}
},
"source" : "to_upper(first_name) == \\"FOO\\"@2:42"
}
},
{
"esql_single_value" : {
"field" : "emp_no",
"next" : {
"range" : {
"emp_no" : {
"gt" : 10,
"boost" : 1.0
}
}
},
"source" : "emp_no > 10@2:75"
}
}
],
"boost" : 1.0
}
}
""");
}
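Each disjunct is pushed down independently and the results are combined into a single `bool`/`should`. A sketch of the combined query with QueryBuilders, mirroring the expected JSON above minus the `esql_single_value` wrappers:

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class DisjunctionPushDownSketch {
    // One should clause per pushed-down disjunct, as in the expected JSON.
    static BoolQueryBuilder combined() {
        return QueryBuilders.boolQuery()
            .should(QueryBuilders.boolQuery()
                .mustNot(QueryBuilders.termQuery("first_name", "foo").caseInsensitive(true)))
            .should(QueryBuilders.termQuery("first_name", "FOO").caseInsensitive(true))
            .should(QueryBuilders.rangeQuery("emp_no").gt(10));
    }
}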
// same tree as doTestPushDownChangeCase(), but with an EvalExec on top (for `x`)
public void testPushDownChangeCaseThroughEval() {
var esql = """
from test
| eval x = first_name
| where to_lower(x) == "foo"
""";
var plan = physicalPlan(esql);
var optimized = optimizedPlan(plan);
var eval = as(optimized, EvalExec.class);
var topLimit = as(eval.child(), LimitExec.class);
var exchange = asRemoteExchange(topLimit.child());
var project = as(exchange.child(), ProjectExec.class);
var extractRest = as(project.child(), FieldExtractExec.class);
var source = source(extractRest.child());
var expected = """
{
"esql_single_value" : {
"field" : "first_name",
"next" : {
"term" : {
"first_name" : {
"value" : "foo",
"case_insensitive" : true
}
}
},
"source" : "to_lower(x) == \\"foo\\"@3:9"
}
}""";
QueryBuilder query = source.query();
assertThat(stripThrough(query.toString()), is(stripThrough(expected)));
}
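The push-down still fires here because the optimizer resolves the EVAL alias `x` back to the underlying field `first_name`; only the recorded `source` string keeps the original `to_lower(x)` spelling. A hypothetical sketch of that alias resolution — the method and map are illustrative, not the actual optimizer API:

import java.util.Map;

class AliasResolutionSketch {
    // Follow EVAL alias chains (x -> first_name) until a real field is reached.
    static String resolveField(String name, Map<String, String> evalAliases) {
        while (evalAliases.containsKey(name)) {
            name = evalAliases.get(name);
        }
        return name;
    }
}

For example, resolveField("x", Map.of("x", "first_name")) yields "first_name", the field the term query is built against.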
/*
* LimitExec[1000[INTEGER]]
* \_ExchangeExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12,
* languages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],false]
* \_ProjectExec[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12,
* languages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8]]
* \_FieldExtractExec[_meta_field{f}#9, emp_no{f}#3, gender{f}#5, hire_da..]<[]>
* \_LimitExec[1000[INTEGER]]
* \_FilterExec[NOT(INSENSITIVEEQUALS(CONCAT(first_name{f}#4,[66 6f 6f][KEYWORD]),[66 6f 6f][KEYWORD]))]
* \_FieldExtractExec[first_name{f}#4]<[]>
* \_EsQueryExec[test], indexMode[standard], query[][_doc{f}#25], limit[], sort[] estimatedRowSize[332]
*/
public void testNoPushDownChangeCase() {
var plan = physicalPlan("""
from test
| where to_lower(concat(first_name, "foo")) != "foo"
""");
var optimized = optimizedPlan(plan);
var topLimit = as(optimized, LimitExec.class);
var exchange = asRemoteExchange(topLimit.child());
var project = as(exchange.child(), ProjectExec.class);
var fieldExtract = as(project.child(), FieldExtractExec.class);
var limit = as(fieldExtract.child(), LimitExec.class);
var filter = as(limit.child(), FilterExec.class);
var fieldExtract2 = as(filter.child(), FieldExtractExec.class);
var source = source(fieldExtract2.child());
assertThat(source.query(), nullValue());
}
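Push-down is skipped here because the argument of `to_lower` is a computed value rather than a bare field, so the comparison stays in a FilterExec above the source. A hypothetical eligibility check capturing that guard — the method name is made up and the real rule is more involved:

import org.elasticsearch.xpack.esql.core.expression.Expression;
import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;

class PushDownGuardSketch {
    // Only a bare field reference is eligible; concat(first_name, "foo") is not.
    static boolean canPushChangeCase(Expression argument) {
        return argument instanceof FieldAttribute;
    }
}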
public void testPushDownNotRLike() {
@ -2364,7 +2615,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
}
public void testVerifierOnMissingReferencesWithBinaryPlans() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
// Do not assert serialization:
// This will have a LookupJoinExec, which is not serializable because it doesn't leave the coordinator.
@ -7047,7 +7298,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
}
public void testLookupJoinFieldLoading() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz")));
@ -7124,7 +7375,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
}
public void testLookupJoinFieldLoadingTwoLookups() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
TestDataSource data = dataSetWithLookupIndices(
Map.of(
@ -7178,7 +7429,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/119082")
public void testLookupJoinFieldLoadingTwoLookupsProjectInBetween() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
TestDataSource data = dataSetWithLookupIndices(
Map.of(
@ -7219,7 +7470,7 @@ public class PhysicalPlanOptimizerTests extends ESTestCase {
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/118778")
public void testLookupJoinFieldLoadingDropAllFields() throws Exception {
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
TestDataSource data = dataSetWithLookupIndices(Map.of("lookup_index", List.of("first_name", "foo", "bar", "baz")));


@ -1365,7 +1365,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testLookupJoin() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"FROM employees | KEEP languages | RENAME languages AS language_code | LOOKUP JOIN languages_lookup ON language_code",
Set.of("languages", "languages.*", "language_code", "language_code.*"),
@ -1374,7 +1374,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testLookupJoinKeep() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM employees
@ -1388,7 +1388,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testLookupJoinKeepWildcard() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM employees
@ -1402,7 +1402,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoin() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1415,7 +1415,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinKeepBefore() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1429,7 +1429,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinKeepBetween() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1454,7 +1454,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinKeepAfter() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1481,7 +1481,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinKeepAfterWildcard() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1495,7 +1495,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinSameIndex() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1509,7 +1509,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinSameIndexKeepBefore() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1524,7 +1524,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinSameIndexKeepBetween() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data
@ -1550,7 +1550,7 @@ public class IndexResolverFieldNamesTests extends ESTestCase {
}
public void testMultiLookupJoinSameIndexKeepAfter() {
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V8.isEnabled());
assumeTrue("LOOKUP JOIN available as snapshot only", EsqlCapabilities.Cap.JOIN_LOOKUP_V9.isEnabled());
assertFieldNames(
"""
FROM sample_data

Some files were not shown because too many files have changed in this diff.